Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Fixed

- `install.sh` now falls back to pip when binary fails in devcontainers with older glibc (#456)
- Skills now deploy to all active targets (`.opencode/`, `.cursor/`) instead of only `.github/` (#456)
- `apm install` no longer rewrites `apm.lock.yaml` when dependencies are unchanged, eliminating `generated_at` churn in version control (#456)

## [0.8.5] - 2026-03-24

### Added
Expand Down
10 changes: 8 additions & 2 deletions install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -373,9 +373,15 @@ fi
chmod +x "$TMP_DIR/$EXTRACTED_DIR/$BINARY_NAME"

# Test the binary
# Use if/else to capture exit code without triggering set -e.
# When glibc is too old the binary exits 255 immediately;
# we must survive that so the pip-fallback path below is reachable.
echo -e "${YELLOW}Testing binary...${NC}"
BINARY_TEST_OUTPUT=$("$TMP_DIR/$EXTRACTED_DIR/$BINARY_NAME" --version 2>&1)
BINARY_TEST_EXIT_CODE=$?
if BINARY_TEST_OUTPUT=$("$TMP_DIR/$EXTRACTED_DIR/$BINARY_NAME" --version 2>&1); then
BINARY_TEST_EXIT_CODE=0
else
BINARY_TEST_EXIT_CODE=$?
fi

if [ $BINARY_TEST_EXIT_CODE -eq 0 ]; then
echo -e "${GREEN}✓ Binary test successful${NC}"
Expand Down
53 changes: 35 additions & 18 deletions src/apm_cli/commands/install.py
Original file line number Diff line number Diff line change
Expand Up @@ -874,12 +874,20 @@ def _log_integration(msg):
package_info, project_root,
diagnostics=diagnostics, managed_files=managed_files, force=force,
)
# Build human-readable list of target dirs from deployed paths
_skill_target_dirs: set[str] = set()
for tp in skill_result.target_paths:
rel = tp.relative_to(project_root)
if rel.parts:
_skill_target_dirs.add(rel.parts[0])
_skill_targets = sorted(_skill_target_dirs)
_skill_target_str = ", ".join(f"{d}/skills/" for d in _skill_targets) or "skills/"
if skill_result.skill_created:
result["skills"] += 1
_log_integration(f" └─ Skill integrated -> .github/skills/")
_log_integration(f" |-- Skill integrated -> {_skill_target_str}")
if skill_result.sub_skills_promoted > 0:
result["sub_skills"] += skill_result.sub_skills_promoted
_log_integration(f" └─ {skill_result.sub_skills_promoted} skill(s) integrated -> .github/skills/")
_log_integration(f" |-- {skill_result.sub_skills_promoted} skill(s) integrated -> {_skill_target_str}")
Comment on lines +887 to +890
Copy link

Copilot AI Mar 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These skill integration tree lines use a different ASCII prefix ("|--") than the rest of the install integration output in this function (which uses "└─"). This makes the output formatting inconsistent; consider standardizing on one prefix (preferably ASCII everywhere if following the encoding rule) and updating all related tree_item call sites accordingly.

Copilot uses AI. Check for mistakes.
for tp in skill_result.target_paths:
deployed.append(tp.relative_to(project_root).as_posix())

Expand Down Expand Up @@ -1302,19 +1310,21 @@ def _collect_descendants(node, visited=None):
# Get config target from apm.yml if available
config_target = apm_package.target

# Auto-create .github/ if neither .github/ nor .claude/ exists.
# Per skill-strategy Decision 1, .github/skills/ is the standard skills location;
# creating .github/ here ensures a consistent skills root and also enables
# VSCode/Copilot integration by default (quick path to value), even for
# projects that don't yet use .claude/.
github_dir = project_root / GITHUB_DIR
claude_dir = project_root / CLAUDE_DIR
if not github_dir.exists() and not claude_dir.exists():
github_dir.mkdir(parents=True, exist_ok=True)
if logger:
logger.verbose_detail(
"Created .github/ as standard skills root (.github/skills/) and to enable VSCode/Copilot integration"
)
# Ensure auto_create targets exist.
# Copilot (.github) has auto_create=True -- it is always created so
# there is a guaranteed skills root even for greenfield projects.
from apm_cli.integration.targets import active_targets as _active_targets

_targets = _active_targets(project_root)
for _t in _targets:
if _t.auto_create:
_target_dir = project_root / _t.root_dir
if not _target_dir.exists():
_target_dir.mkdir(parents=True, exist_ok=True)
if logger:
logger.verbose_detail(
f"Created {_t.root_dir}/ ({_t.name} target)"
)
Comment on lines +1313 to +1327
Copy link

Copilot AI Mar 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The new auto-create loop runs before target detection and uses active_targets(), which always includes the Copilot profile (detect_by_dir=False, auto_create=True). This means .github/ will still be created even when the repo only has .opencode/ or .cursor/, so it does not actually address the reported behavior in #453. Consider moving auto-create to after detect_target()/should_integrate_* and only creating root dirs for targets that will be integrated (e.g., only create .github/ when integrate_vscode is true).

This issue also appears on line 1312 of the same file.

Copilot uses AI. Check for mistakes.

detected_target, detection_reason = detect_target(
project_root=project_root,
Expand Down Expand Up @@ -2109,9 +2119,16 @@ def _collect_descendants(node, visited=None):
existing.add_dependency(dep)
lockfile = existing

lockfile.save(lockfile_path)
if logger:
logger.verbose_detail(f"Generated apm.lock.yaml with {len(lockfile.dependencies)} dependencies")
# Only write when the semantic content has actually changed
# (avoids generated_at churn in version control).
existing_lockfile = LockFile.read(lockfile_path) if lockfile_path.exists() else None
if existing_lockfile and lockfile.is_semantically_equivalent(existing_lockfile):
if logger:
logger.verbose_detail("apm.lock.yaml unchanged -- skipping write")
else:
lockfile.save(lockfile_path)
if logger:
logger.verbose_detail(f"Generated apm.lock.yaml with {len(lockfile.dependencies)} dependencies")
except Exception as e:
_lock_msg = f"Could not generate apm.lock.yaml: {e}"
diagnostics.error(_lock_msg)
Expand Down
20 changes: 20 additions & 0 deletions src/apm_cli/deps/lockfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,26 @@ def save(self, path: Path) -> None:
"""Save lock file to disk (alias for write)."""
self.write(path)

def is_semantically_equivalent(self, other: "LockFile") -> bool:
    """Report whether *other* carries the same semantic payload as self.

    Two lockfiles are considered equivalent when their lockfile version,
    dependency entries, MCP servers (order-insensitive), and MCP configs
    all match. Volatile metadata (``generated_at``, ``apm_version``) is
    deliberately excluded so a no-change install does not dirty the file.
    """
    # Cheap structural checks first: version, then the dependency key set.
    if self.lockfile_version != other.lockfile_version:
        return False
    mine, theirs = self.dependencies, other.dependencies
    # dict view equality is set-like: same keys regardless of order.
    if mine.keys() != theirs.keys():
        return False
    # Per-dependency deep comparison via the serialized form.
    if any(mine[name].to_dict() != theirs[name].to_dict() for name in mine):
        return False
    # MCP servers compare order-insensitively; configs compare exactly.
    return (
        sorted(self.mcp_servers) == sorted(other.mcp_servers)
        and self.mcp_configs == other.mcp_configs
    )

@classmethod
def installed_paths_for_project(cls, project_root: Path) -> List[str]:
"""Load apm.lock.yaml from project_root and return installed paths.
Expand Down
Loading
Loading