diff --git a/CHANGELOG.md b/CHANGELOG.md
index 53ed31e7..b95ab92f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+### Fixed
+
+- `install.sh` now falls back to pip when binary fails in devcontainers with older glibc (#456)
+- Skills now deploy to all active targets (`.opencode/`, `.cursor/`) instead of only `.github/` (#456)
+- `apm install` no longer rewrites `apm.lock.yaml` when dependencies are unchanged, eliminating `generated_at` churn in version control (#456)
+
 ## [0.8.5] - 2026-03-24
 
 ### Added
diff --git a/install.sh b/install.sh
index fdf165ba..7d474273 100755
--- a/install.sh
+++ b/install.sh
@@ -373,9 +373,15 @@ fi
 chmod +x "$TMP_DIR/$EXTRACTED_DIR/$BINARY_NAME"
 
 # Test the binary
+# Use if/else to capture exit code without triggering set -e.
+# When glibc is too old the binary exits 255 immediately;
+# we must survive that so the pip-fallback path below is reachable.
 echo -e "${YELLOW}Testing binary...${NC}"
-BINARY_TEST_OUTPUT=$("$TMP_DIR/$EXTRACTED_DIR/$BINARY_NAME" --version 2>&1)
-BINARY_TEST_EXIT_CODE=$?
+if BINARY_TEST_OUTPUT=$("$TMP_DIR/$EXTRACTED_DIR/$BINARY_NAME" --version 2>&1); then
+    BINARY_TEST_EXIT_CODE=0
+else
+    BINARY_TEST_EXIT_CODE=$?
+fi
 
 if [ $BINARY_TEST_EXIT_CODE -eq 0 ]; then
     echo -e "${GREEN}✓ Binary test successful${NC}"
diff --git a/src/apm_cli/commands/install.py b/src/apm_cli/commands/install.py
index 100e4586..c145a4b3 100644
--- a/src/apm_cli/commands/install.py
+++ b/src/apm_cli/commands/install.py
@@ -874,12 +874,20 @@ def _log_integration(msg):
            package_info, project_root, diagnostics=diagnostics,
            managed_files=managed_files, force=force,
        )
+        # Build human-readable list of target dirs from deployed paths
+        _skill_target_dirs: set[str] = set()
+        for tp in skill_result.target_paths:
+            rel = tp.relative_to(project_root)
+            if rel.parts:
+                _skill_target_dirs.add(rel.parts[0])
+        _skill_targets = sorted(_skill_target_dirs)
+        _skill_target_str = ", ".join(f"{d}/skills/" for d in _skill_targets) or "skills/"
        if skill_result.skill_created:
            result["skills"] += 1
-            _log_integration(f"   └─ Skill integrated -> .github/skills/")
+            _log_integration(f"   |-- Skill integrated -> {_skill_target_str}")
        if skill_result.sub_skills_promoted > 0:
            result["sub_skills"] += skill_result.sub_skills_promoted
-            _log_integration(f"   └─ {skill_result.sub_skills_promoted} skill(s) integrated -> .github/skills/")
+            _log_integration(f"   |-- {skill_result.sub_skills_promoted} skill(s) integrated -> {_skill_target_str}")
 
        for tp in skill_result.target_paths:
            deployed.append(tp.relative_to(project_root).as_posix())
@@ -1302,19 +1310,21 @@ def _collect_descendants(node, visited=None):
    # Get config target from apm.yml if available
    config_target = apm_package.target
 
-    # Auto-create .github/ if neither .github/ nor .claude/ exists.
-    # Per skill-strategy Decision 1, .github/skills/ is the standard skills location;
-    # creating .github/ here ensures a consistent skills root and also enables
-    # VSCode/Copilot integration by default (quick path to value), even for
-    # projects that don't yet use .claude/.
-    github_dir = project_root / GITHUB_DIR
-    claude_dir = project_root / CLAUDE_DIR
-    if not github_dir.exists() and not claude_dir.exists():
-        github_dir.mkdir(parents=True, exist_ok=True)
-        if logger:
-            logger.verbose_detail(
-                "Created .github/ as standard skills root (.github/skills/) and to enable VSCode/Copilot integration"
-            )
+    # Ensure auto_create targets exist.
+    # Copilot (.github) has auto_create=True -- it is always created so
+    # there is a guaranteed skills root even for greenfield projects.
+    from apm_cli.integration.targets import active_targets as _active_targets
+
+    _targets = _active_targets(project_root)
+    for _t in _targets:
+        if _t.auto_create:
+            _target_dir = project_root / _t.root_dir
+            if not _target_dir.exists():
+                _target_dir.mkdir(parents=True, exist_ok=True)
+                if logger:
+                    logger.verbose_detail(
+                        f"Created {_t.root_dir}/ ({_t.name} target)"
+                    )
 
    detected_target, detection_reason = detect_target(
        project_root=project_root,
@@ -2109,9 +2119,16 @@ def _collect_descendants(node, visited=None):
            existing.add_dependency(dep)
        lockfile = existing
 
-        lockfile.save(lockfile_path)
-        if logger:
-            logger.verbose_detail(f"Generated apm.lock.yaml with {len(lockfile.dependencies)} dependencies")
+        # Only write when the semantic content has actually changed
+        # (avoids generated_at churn in version control).
+        existing_lockfile = LockFile.read(lockfile_path) if lockfile_path.exists() else None
+        if existing_lockfile and lockfile.is_semantically_equivalent(existing_lockfile):
+            if logger:
+                logger.verbose_detail("apm.lock.yaml unchanged -- skipping write")
+        else:
+            lockfile.save(lockfile_path)
+            if logger:
+                logger.verbose_detail(f"Generated apm.lock.yaml with {len(lockfile.dependencies)} dependencies")
    except Exception as e:
        _lock_msg = f"Could not generate apm.lock.yaml: {e}"
        diagnostics.error(_lock_msg)
diff --git a/src/apm_cli/deps/lockfile.py b/src/apm_cli/deps/lockfile.py
index 521a842e..f5a12b59 100644
--- a/src/apm_cli/deps/lockfile.py
+++ b/src/apm_cli/deps/lockfile.py
@@ -302,6 +302,26 @@ def save(self, path: Path) -> None:
        """Save lock file to disk (alias for write)."""
        self.write(path)
 
+    def is_semantically_equivalent(self, other: "LockFile") -> bool:
+        """Return True if *other* has the same deps, MCP servers, and configs.
+
+        Ignores ``generated_at`` and ``apm_version`` so that a no-change
+        install does not dirty the lockfile.
+        """
+        if self.lockfile_version != other.lockfile_version:
+            return False
+        if set(self.dependencies.keys()) != set(other.dependencies.keys()):
+            return False
+        for key, dep in self.dependencies.items():
+            other_dep = other.dependencies[key]
+            if dep.to_dict() != other_dep.to_dict():
+                return False
+        if sorted(self.mcp_servers) != sorted(other.mcp_servers):
+            return False
+        if self.mcp_configs != other.mcp_configs:
+            return False
+        return True
+
    @classmethod
    def installed_paths_for_project(cls, project_root: Path) -> List[str]:
        """Load apm.lock.yaml from project_root and return installed paths.
diff --git a/src/apm_cli/integration/skill_integrator.py b/src/apm_cli/integration/skill_integrator.py
index 4335a46d..f4cbb8d7 100644
--- a/src/apm_cli/integration/skill_integrator.py
+++ b/src/apm_cli/integration/skill_integrator.py
@@ -585,9 +585,9 @@ def _promote_sub_skills_standalone(
        """Promote sub-skills from a package that is NOT itself a skill.
 
        Packages typed as INSTRUCTIONS may still ship sub-skills under
-        ``.apm/skills/``. This method promotes them to ``.github/skills/``
-        (and ``.claude/skills/`` when present) without creating a top-level
-        skill entry for the parent package.
+        ``.apm/skills/``. This method promotes them to all active targets
+        that support skills, without creating a top-level skill entry for
+        the parent package.
 
        Args:
            package_info: PackageInfo object with package metadata.
@@ -601,41 +601,34 @@ def _promote_sub_skills_standalone(
        if not sub_skills_dir.is_dir():
            return 0, []
 
+        from apm_cli.integration.targets import active_targets
+
        parent_name = package_path.name
-        github_skills_root = project_root / ".github" / "skills"
-        github_skills_root.mkdir(parents=True, exist_ok=True)
 
        owned_by = self._build_skill_ownership_map(project_root)
-        count, deployed = self._promote_sub_skills(
-            sub_skills_dir, github_skills_root, parent_name, warn=True, owned_by=owned_by, diagnostics=diagnostics, managed_files=managed_files, force=force, project_root=project_root
-        )
-        all_deployed = list(deployed)
-
-        # Also promote into .claude/skills/ when .claude/ exists
-        claude_dir = project_root / ".claude"
-        if claude_dir.exists() and claude_dir.is_dir():
-            claude_skills_root = claude_dir / "skills"
-            _, claude_deployed = self._promote_sub_skills(
-                sub_skills_dir, claude_skills_root, parent_name, warn=False, project_root=project_root
-            )
-            all_deployed.extend(claude_deployed)
-
-        # Also promote into .cursor/skills/ when .cursor/ exists
-        cursor_dir = project_root / ".cursor"
-        if cursor_dir.exists() and cursor_dir.is_dir():
-            cursor_skills_root = cursor_dir / "skills"
-            _, cursor_deployed = self._promote_sub_skills(
-                sub_skills_dir, cursor_skills_root, parent_name, warn=False, project_root=project_root
-            )
-            all_deployed.extend(cursor_deployed)
-
-        # Also promote into .opencode/skills/ when .opencode/ exists
-        opencode_dir = project_root / ".opencode"
-        if opencode_dir.exists() and opencode_dir.is_dir():
-            opencode_skills_root = opencode_dir / "skills"
-            _, opencode_deployed = self._promote_sub_skills(
-                sub_skills_dir, opencode_skills_root, parent_name, warn=False, project_root=project_root
+        targets = active_targets(project_root)
+        count = 0
+        all_deployed: list[Path] = []
+
+        for target in targets:
+            if not target.supports("skills"):
+                continue
+
+            is_primary = not target.detect_by_dir
+            target_skills_root = project_root / target.root_dir / "skills"
+            target_skills_root.mkdir(parents=True, exist_ok=True)
+
+            n, deployed = self._promote_sub_skills(
+                sub_skills_dir, target_skills_root, parent_name,
+                warn=is_primary,
+                owned_by=owned_by if is_primary else None,
+                diagnostics=diagnostics if is_primary else None,
+                managed_files=managed_files if is_primary else None,
+                force=force,
+                project_root=project_root,
            )
-            all_deployed.extend(opencode_deployed)
+            if is_primary:
+                count = n
+            all_deployed.extend(deployed)
 
        return count, all_deployed
@@ -644,23 +637,18 @@ def _integrate_native_skill(
        self, package_info, package_path, project_root,
        diagnostics=None, managed_files=None, force: bool = False, logger=None,
    ) -> SkillIntegrationResult:
-        """Copy a native Skill (with existing SKILL.md) to .github/skills/ and optionally .claude/skills/ and .cursor/skills/.
+        """Copy a native Skill (with existing SKILL.md) to all active targets.
 
        For packages that already have a SKILL.md at their root (like those from
-        awesome-claude-skills), we copy the entire skill folder rather than
-        regenerating from .apm/ primitives.
+        awesome-claude-skills), we copy the entire skill folder to every active
+        target that supports skills (driven by ``active_targets()``).
 
-        The skill folder name is the source folder name (e.g., `mcp-builder`),
+        The skill folder name is the source folder name (e.g., ``mcp-builder``),
        validated and normalized per the agentskills.io spec.
 
-        Source SKILL.md is copied verbatim — no metadata injection. Orphan
+        Source SKILL.md is copied verbatim -- no metadata injection. Orphan
        detection uses apm.lock via directory name matching instead.
 
-        T7 Enhancement: Also copies to .claude/skills/ when .claude/ folder exists
-        and to .cursor/skills/ when .cursor/ folder exists.
-        This ensures Claude Code and Cursor users get skills while not polluting
-        projects that don't use those tools.
-
        Copies:
        - SKILL.md (required)
        - scripts/ (optional)
@@ -707,81 +695,70 @@ def _integrate_native_skill(
        except ImportError:
            pass  # CLI not available in tests
 
-        # Primary target: .github/skills/
-        github_skill_dir = project_root / ".github" / "skills" / skill_name
-        github_skill_md = github_skill_dir / "SKILL.md"
-
-        # Always copy -- source integrity is preserved, orphan detection uses apm.lock
-        skill_created = not github_skill_dir.exists()
-        skill_updated = not skill_created
-
+        # Deploy to all active targets that support skills.
+        # Copilot (.github) is always active (detect_by_dir=False).
+        # Others (.claude, .cursor, .opencode) are active when their dir exists.
+        from apm_cli.integration.targets import active_targets
+
+        targets = active_targets(project_root)
+        skill_created = False
+        skill_updated = False
        files_copied = 0
-        claude_skill_dir: Path | None = None
-
-        # === Copy to .github/skills/ (primary) ===
-        if github_skill_dir.exists():
-            shutil.rmtree(github_skill_dir)
-
-        github_skill_dir.parent.mkdir(parents=True, exist_ok=True)
-        shutil.copytree(package_path, github_skill_dir,
-                        ignore=shutil.ignore_patterns('.apm'))
-
-        files_copied = sum(1 for _ in github_skill_dir.rglob('*') if _.is_file())
-
-        # Track deployed paths
-        all_target_paths = [github_skill_dir]
-
-        # === Promote sub-skills to top-level entries ===
-        # Packages may contain sub-skills in .apm/skills/*/ subdirectories.
-        # Copilot only discovers .github/skills/<skill>/SKILL.md (direct children),
-        # so we promote each sub-skill to an independent top-level entry.
-        sub_skills_dir = package_path / ".apm" / "skills"
-        github_skills_root = project_root / ".github" / "skills"
+        all_target_paths: list[Path] = []
+        primary_skill_md: Path | None = None
+
        owned_by = self._build_skill_ownership_map(project_root)
-        sub_skills_count, sub_deployed = self._promote_sub_skills(sub_skills_dir, github_skills_root, skill_name, warn=True, owned_by=owned_by, diagnostics=diagnostics, managed_files=managed_files, force=force, project_root=project_root, logger=logger)
-        all_target_paths.extend(sub_deployed)
-
-        # === T7: Copy to .claude/skills/ (secondary - compatibility) ===
-        claude_dir = project_root / ".claude"
-        if claude_dir.exists() and claude_dir.is_dir():
-            claude_skill_dir = claude_dir / "skills" / skill_name
-
-            if claude_skill_dir.exists():
-                shutil.rmtree(claude_skill_dir)
-
-            claude_skill_dir.parent.mkdir(parents=True, exist_ok=True)
-            shutil.copytree(package_path, claude_skill_dir,
-                            ignore=shutil.ignore_patterns('.apm'))
-            all_target_paths.append(claude_skill_dir)
-
-            # Promote sub-skills for Claude too
-            claude_skills_root = claude_dir / "skills"
-            _, claude_sub_deployed = self._promote_sub_skills(sub_skills_dir, claude_skills_root, skill_name, warn=False, project_root=project_root)
-            all_target_paths.extend(claude_sub_deployed)
-
-        # === Copy to .cursor/skills/ (tertiary - compatibility) ===
-        cursor_dir = project_root / ".cursor"
-        if cursor_dir.exists() and cursor_dir.is_dir():
-            cursor_skill_dir = cursor_dir / "skills" / skill_name
-
-            if cursor_skill_dir.exists():
-                shutil.rmtree(cursor_skill_dir)
-
-            cursor_skill_dir.parent.mkdir(parents=True, exist_ok=True)
-            shutil.copytree(package_path, cursor_skill_dir,
+        sub_skills_dir = package_path / ".apm" / "skills"
+
+        for target in targets:
+            if not target.supports("skills"):
+                continue
+
+            is_primary = not target.detect_by_dir  # copilot = primary
+            target_skill_dir = project_root / target.root_dir / "skills" / skill_name
+
+            if is_primary:
+                skill_created = not target_skill_dir.exists()
+                skill_updated = not skill_created
+                primary_skill_md = target_skill_dir / "SKILL.md"
+
+            if target_skill_dir.exists():
+                shutil.rmtree(target_skill_dir)
+
+            target_skill_dir.parent.mkdir(parents=True, exist_ok=True)
+            shutil.copytree(package_path, target_skill_dir,
                            ignore=shutil.ignore_patterns('.apm'))
-            all_target_paths.append(cursor_skill_dir)
-
-            # Promote sub-skills for Cursor too
-            cursor_skills_root = cursor_dir / "skills"
-            _, cursor_sub_deployed = self._promote_sub_skills(sub_skills_dir, cursor_skills_root, skill_name, warn=False, project_root=project_root)
-            all_target_paths.extend(cursor_sub_deployed)
+            all_target_paths.append(target_skill_dir)
+
+            if is_primary:
+                files_copied = sum(1 for _ in target_skill_dir.rglob('*') if _.is_file())
+
+            # Promote sub-skills for this target
+            target_skills_root = project_root / target.root_dir / "skills"
+            _, sub_deployed = self._promote_sub_skills(
+                sub_skills_dir, target_skills_root, skill_name,
+                warn=is_primary,
+                owned_by=owned_by if is_primary else None,
+                diagnostics=diagnostics if is_primary else None,
+                managed_files=managed_files if is_primary else None,
+                force=force,
+                project_root=project_root,
+                logger=logger if is_primary else None,
+            )
+            all_target_paths.extend(sub_deployed)
+
+        # Count unique sub-skills from primary target only
+        primary_root = project_root / ".github" / "skills"
+        sub_skills_count = sum(
+            1 for p in all_target_paths
+            if p.parent == primary_root and p.name != skill_name
+        )
 
        return SkillIntegrationResult(
            skill_created=skill_created,
            skill_updated=skill_updated,
            skill_skipped=False,
-            skill_path=github_skill_md,
+            skill_path=primary_skill_md,
            references_copied=files_copied,
            links_resolved=0,
            sub_skills_promoted=sub_skills_count,
@@ -789,12 +766,15 @@ def _integrate_native_skill(
        )
 
    def integrate_package_skill(self, package_info, project_root: Path, diagnostics=None, managed_files=None, force: bool = False, logger=None) -> SkillIntegrationResult:
-        """Integrate a package's skill into .github/skills/ directory.
+        """Integrate a package's skill into all active target directories.
+
+        Copies native skills (packages with SKILL.md at root) to every active
+        target that supports skills (e.g. .github/skills/, .claude/skills/,
+        .opencode/skills/). Also promotes any sub-skills from .apm/skills/.
 
-        Copies native skills (packages with SKILL.md at root) to .github/skills/
-        and optionally .claude/skills/ and .cursor/skills/. Also promotes any sub-skills from .apm/skills/.
+        Target selection is driven by ``active_targets()`` from ``targets.py``.
 
-        Packages without SKILL.md at root are not installed as skills — only their
+        Packages without SKILL.md at root are not installed as skills -- only their
        sub-skills (if any) are promoted.
 
        Args:
diff --git a/src/apm_cli/utils/path_security.py b/src/apm_cli/utils/path_security.py
index acbb40ec..b556112f 100644
--- a/src/apm_cli/utils/path_security.py
+++ b/src/apm_cli/utils/path_security.py
@@ -6,7 +6,7 @@
 
 Design
 ------
-* ``ensure_path_within`` is the single predicate — resolves both paths and
+* ``ensure_path_within`` is the single predicate -- resolves both paths and
   asserts containment via ``Path.is_relative_to``.
 * ``safe_rmtree`` wraps ``shutil.rmtree`` with an ``ensure_path_within`` check
   so callers get a drop-in replacement.
diff --git a/tests/test_lockfile.py b/tests/test_lockfile.py
index 5e32d798..701ca6a5 100644
--- a/tests/test_lockfile.py
+++ b/tests/test_lockfile.py
@@ -217,3 +217,71 @@ def test_migrated_file_is_readable(self, tmp_path):
        loaded = LockFile.read(tmp_path / "apm.lock.yaml")
        assert loaded is not None
        assert loaded.has_dependency("owner/repo")
+
+
+class TestLockFileSemanticEquivalence:
+    """Tests for LockFile.is_semantically_equivalent()."""
+
+    def _make_lock(self, **overrides):
+        lock = LockFile(
+            lockfile_version="1",
+            generated_at="2025-01-01T00:00:00+00:00",
+            apm_version="0.8.5",
+        )
+        lock.add_dependency(LockedDependency(
+            repo_url="owner/repo", resolved_commit="abc123", depth=0,
+        ))
+        for k, v in overrides.items():
+            setattr(lock, k, v)
+        return lock
+
+    def test_identical_is_equivalent(self):
+        a = self._make_lock()
+        b = self._make_lock()
+        assert a.is_semantically_equivalent(b)
+
+    def test_different_generated_at_still_equivalent(self):
+        a = self._make_lock(generated_at="2025-01-01T00:00:00+00:00")
+        b = self._make_lock(generated_at="2025-06-15T12:00:00+00:00")
+        assert a.is_semantically_equivalent(b)
+
+    def test_different_apm_version_still_equivalent(self):
+        a = self._make_lock(apm_version="0.8.5")
+        b = self._make_lock(apm_version="0.9.0")
+        assert a.is_semantically_equivalent(b)
+
+    def test_added_dependency_not_equivalent(self):
+        a = self._make_lock()
+        b = self._make_lock()
+        b.add_dependency(LockedDependency(repo_url="other/pkg", depth=1))
+        assert not a.is_semantically_equivalent(b)
+
+    def test_removed_dependency_not_equivalent(self):
+        a = self._make_lock()
+        b = LockFile()
+        assert not a.is_semantically_equivalent(b)
+
+    def test_changed_mcp_servers_not_equivalent(self):
+        a = self._make_lock(mcp_servers=["server-a"])
+        b = self._make_lock(mcp_servers=["server-b"])
+        assert not a.is_semantically_equivalent(b)
+
+    def test_mcp_server_order_irrelevant(self):
+        a = self._make_lock(mcp_servers=["b", "a"])
+        b = self._make_lock(mcp_servers=["a", "b"])
+        assert a.is_semantically_equivalent(b)
+
+    def test_changed_mcp_configs_not_equivalent(self):
+        a = self._make_lock(mcp_configs={"s": {"cmd": "a"}})
+        b = self._make_lock(mcp_configs={"s": {"cmd": "b"}})
+        assert not a.is_semantically_equivalent(b)
+
+    def test_changed_lockfile_version_not_equivalent(self):
+        a = self._make_lock(lockfile_version="1")
+        b = self._make_lock(lockfile_version="2")
+        assert not a.is_semantically_equivalent(b)
+
+    def test_new_lockfile_vs_empty(self):
+        a = self._make_lock()
+        b = LockFile()
+        assert not a.is_semantically_equivalent(b)