Skip to content

Commit 9451c2d

Browse files
committed
ci: rebuild CI & publish workflows; code/test fixes for CI compatibility
1 parent ab14083 commit 9451c2d

26 files changed

Lines changed: 225 additions & 172 deletions

.github/workflows/ci.yml

Lines changed: 15 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -2,57 +2,52 @@ name: CI
22

33
on:
44
push:
5-
branches:
6-
- main
5+
branches: [main]
76
pull_request:
8-
branches:
9-
- main
7+
branches: [main]
108

119
jobs:
12-
build-and-test:
13-
name: Test with Python ${{ matrix.python-version }}
10+
quality:
11+
name: Quality checks (Python ${{ matrix.python-version }})
1412
runs-on: ubuntu-latest
1513
strategy:
16-
fail-fast: false
1714
matrix:
18-
python-version: ["3.10", "3.11", "3.12", "3.13"]
19-
15+
python-version: ["3.10", "3.11", "3.12"]
16+
2017
steps:
2118
- name: Checkout repository
2219
uses: actions/checkout@v4
2320

24-
- name: Setup Python
21+
- name: Set up Python ${{ matrix.python-version }}
2522
uses: actions/setup-python@v5
2623
with:
2724
python-version: ${{ matrix.python-version }}
2825

29-
- name: Cache pip dependencies
26+
- name: Cache pip downloads
3027
uses: actions/cache@v4
3128
with:
3229
path: ~/.cache/pip
3330
key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }}
3431
restore-keys: |
3532
${{ runner.os }}-pip-
3633
37-
- name: Install project and dev dependencies
34+
- name: Install project and development dependencies
3835
run: |
3936
python -m pip install --upgrade pip
40-
# Installing required tools for the CI steps
41-
pip install ruff mypy vulture pytest build
42-
# Install the project itself in editable mode so tools can find it if needed
4337
pip install -e .
38+
pip install pytest pytest-cov ruff mypy vulture build
4439
45-
- name: Run linting
40+
- name: Lint (ruff)
4641
run: ruff check src
4742

48-
- name: Run type checking
43+
- name: Type check (mypy)
4944
run: mypy src
5045

51-
- name: Run dead code detection
46+
- name: Dead code detection (vulture)
5247
run: vulture src --min-confidence 80
5348

54-
- name: Run test suite
49+
- name: Test suite (pytest)
5550
run: pytest -v
5651

57-
- name: Build package
52+
- name: Build package (PEP 517)
5853
run: python -m build

.github/workflows/publish.yml

Lines changed: 8 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -6,32 +6,29 @@ on:
66

77
jobs:
88
publish:
9-
name: Build and Publish
9+
name: Build and publish package
1010
runs-on: ubuntu-latest
11-
12-
# Required permissions for PyPI Trusted Publishing
11+
1312
permissions:
14-
id-token: write # This permission is required for trusted publishing
15-
contents: read # Required to checkout the repository
13+
id-token: write
14+
contents: read
1615

1716
steps:
1817
- name: Checkout repository
1918
uses: actions/checkout@v4
2019

21-
- name: Setup Python
20+
- name: Set up Python 3.12
2221
uses: actions/setup-python@v5
2322
with:
24-
python-version: "3.12" # Can use any supported stable version for the build
23+
python-version: "3.12"
2524

26-
- name: Install build tools
25+
- name: Install build dependencies
2726
run: |
2827
python -m pip install --upgrade pip
2928
pip install build
3029
31-
- name: Build package
30+
- name: Build distribution artifacts
3231
run: python -m build
3332

34-
# Publish using PyPI Trusted Publishing.
35-
# Note: No API token or secret is needed securely provided the repository is linked in PyPI settings
3633
- name: Publish to PyPI
3734
uses: pypa/gh-action-pypi-publish@release/v1

src/codectx/cache.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ def _load(self) -> None:
2727
cache_file = self.cache_dir / "cache.json"
2828
if cache_file.is_file():
2929
try:
30-
with open(cache_file, "r", encoding="utf-8") as f:
30+
with open(cache_file, encoding="utf-8") as f:
3131
self._data = json.load(f)
3232
except (json.JSONDecodeError, OSError) as exc:
3333
logger.warning("Cache load failed: %s", exc)
@@ -121,7 +121,7 @@ def export_cache(self, output: Path) -> None:
121121
logger.info("Cache exported to %s", output)
122122

123123
@classmethod
124-
def import_cache(cls, archive: Path, root: Path) -> "Cache":
124+
def import_cache(cls, archive: Path, root: Path) -> Cache:
125125
"""Import cache from a tar.gz archive.
126126
127127
Args:

src/codectx/cli.py

Lines changed: 39 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,14 @@
66
import sys
77
import time
88
from pathlib import Path
9-
from typing import Optional
109

1110
import typer
1211
from rich.console import Console
1312
from rich.panel import Panel
1413
from rich.progress import Progress, SpinnerColumn, TextColumn
1514

16-
from codectx.config.defaults import CACHE_DIR_NAME
17-
1815
from codectx import __version__
16+
from codectx.config.defaults import CACHE_DIR_NAME
1917

2018
app = typer.Typer(
2119
name="codectx",
@@ -47,7 +45,7 @@ def analyze(
4745
"-o",
4846
help="Output file path (default: CONTEXT.md).",
4947
),
50-
since: Optional[str] = typer.Option(
48+
since: str | None = typer.Option(
5149
None,
5250
"--since",
5351
help="Include recent changes since this date (e.g. '7 days ago').",
@@ -63,7 +61,7 @@ def analyze(
6361
"--no-git",
6462
help="Skip git metadata collection.",
6563
),
66-
query: Optional[str] = typer.Option(
64+
query: str | None = typer.Option(
6765
None,
6866
"--query",
6967
"-q",
@@ -79,7 +77,7 @@ def analyze(
7977
"--layers",
8078
help="Generate layered context output.",
8179
),
82-
extra_roots: Optional[list[Path]] = typer.Option(
80+
extra_roots: list[Path] | None = typer.Option(
8381
None,
8482
"--extra-root",
8583
help="Additional root directories for multi-root analysis.",
@@ -180,7 +178,7 @@ def benchmark(
180178

181179
# Rank
182180
t0 = time.perf_counter()
183-
from codectx.ranker.git_meta import collect_git_metadata, collect_recent_changes
181+
from codectx.ranker.git_meta import collect_git_metadata
184182
from codectx.ranker.scorer import score_files
185183

186184
git_meta = collect_git_metadata(files, config.root, config.no_git)
@@ -299,22 +297,26 @@ def search(
299297
) -> None:
300298
"""Search the codebase semantically."""
301299
_setup_logging(verbose)
302-
300+
303301
try:
304302
from codectx.ranker.semantic import is_available, semantic_score
303+
305304
if not is_available():
306-
console.print("[red]Semantic search is not installed. Run: pip install codectx[semantic][/]")
305+
console.print(
306+
"[red]Semantic search is not installed. Run: pip install codectx[semantic][/]"
307+
)
307308
raise typer.Exit(1)
308-
309-
from codectx.walker import walk
310-
from codectx.parser.treesitter import parse_files
311-
from codectx.config.loader import load_config
309+
312310
import hashlib
311+
313312
from codectx.cache import Cache
314-
313+
from codectx.config.loader import load_config
314+
from codectx.parser.treesitter import parse_files
315+
from codectx.walker import walk
316+
315317
config = load_config(root)
316318
files = walk(config.root, config.extra_ignore)
317-
319+
318320
# Parse files with cache
319321
cache = Cache(config.root)
320322
parse_results = {}
@@ -329,9 +331,13 @@ def search(
329331
parse_results[f] = cached
330332
else:
331333
uncached_files.append(f)
332-
334+
333335
if uncached_files:
334-
with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), transient=True) as progress:
336+
with Progress(
337+
SpinnerColumn(),
338+
TextColumn("[progress.description]{task.description}"),
339+
transient=True,
340+
) as progress:
335341
progress.add_task("Parsing uncached files...", total=None)
336342
fresh = parse_files(uncached_files)
337343
for f, result in fresh.items():
@@ -342,28 +348,30 @@ def search(
342348
fhash = ""
343349
cache.put_parse_result(f, fhash, result)
344350
cache.save()
345-
351+
346352
cache_dir = config.root / ".codectx_cache" / "embeddings"
347353
cache_dir.mkdir(parents=True, exist_ok=True)
348-
349-
with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), transient=True) as progress:
354+
355+
with Progress(
356+
SpinnerColumn(), TextColumn("[progress.description]{task.description}"), transient=True
357+
) as progress:
350358
progress.add_task("Computing semantic relevance...", total=None)
351359
scores = semantic_score(query, files, parse_results, cache_dir)
352-
360+
353361
sorted_files = sorted(scores.items(), key=lambda x: x[1], reverse=True)
354-
362+
355363
console.print(f"\n[bold cyan]Search Results for:[/] '{query}'\n")
356-
364+
357365
found = False
358366
for f, score in sorted_files[:limit]:
359367
if score > 0.0:
360368
rel = f.relative_to(config.root)
361369
console.print(f"[bold green]{rel}[/] (score: {score:.3f})")
362370
found = True
363-
371+
364372
if not found:
365373
console.print("[yellow]No relevant files found.[/]")
366-
374+
367375
except Exception as exc:
368376
console.print(f"[red]Error during search:[/] {exc}")
369377
raise typer.Exit(1)
@@ -453,14 +461,16 @@ def main(
453461

454462
from dataclasses import dataclass
455463

464+
456465
@dataclass
457466
class PipelineMetrics:
458467
output_path: Path
459468
files_scanned: int
460469
original_tokens: int
461470
context_tokens: int
462471

463-
def _run_pipeline(config: "object") -> PipelineMetrics:
472+
473+
def _run_pipeline(config: object) -> PipelineMetrics:
464474
"""Run the full codectx pipeline and return the output metrics."""
465475
import hashlib
466476

@@ -470,7 +480,7 @@ def _run_pipeline(config: "object") -> PipelineMetrics:
470480
from codectx.config.loader import Config
471481
from codectx.graph.builder import build_dependency_graph
472482
from codectx.output.formatter import format_context, write_context_file
473-
from codectx.parser.treesitter import parse_file, parse_files
483+
from codectx.parser.treesitter import parse_files
474484
from codectx.ranker.git_meta import collect_git_metadata, collect_recent_changes
475485
from codectx.ranker.scorer import score_files
476486
from codectx.safety import confirm_sensitive_files, find_sensitive_files
@@ -607,6 +617,7 @@ def _run_pipeline(config: "object") -> PipelineMetrics:
607617

608618
if config.layers:
609619
from codectx.output.formatter import write_layer_files
620+
610621
write_layer_files(content_sections, config.root)
611622
output_path = config.root / "FULL_CONTEXT.md"
612623
write_context_file(content_sections, output_path)
@@ -620,6 +631,7 @@ def _run_pipeline(config: "object") -> PipelineMetrics:
620631
progress.update(task, description="Done!")
621632

622633
from codectx.compressor.budget import count_tokens
634+
623635
original_tokens = sum(count_tokens(pr.raw_source) for pr in parse_results.values() if pr)
624636

625637
return PipelineMetrics(

src/codectx/compressor/tiered.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -85,23 +85,23 @@ def sort_key(p: Path) -> tuple[float, str]:
8585

8686
for path in sorted_paths:
8787
tier = tiers.get(path, 3)
88-
88+
8989
if tier == 1:
9090
if path.name in ENTRYPOINT_FILENAMES:
9191
tier1.append(path)
9292
elif core_count < MAX_CORE_MODULES:
9393
tier1.append(path)
9494
core_count += 1
9595
else:
96-
tier = 2 # Downgrade to tier 2
96+
tier = 2 # Downgrade to tier 2
9797

9898
if tier == 2:
9999
if supporting_count < MAX_SUPPORTING_MODULES:
100100
tier2.append(path)
101101
supporting_count += 1
102102
else:
103-
tier = 3 # Downgrade to tier 3
104-
103+
tier = 3 # Downgrade to tier 3
104+
105105
if tier == 3:
106106
tier3.append(path)
107107

@@ -233,7 +233,7 @@ def _tier1_content(pr: ParseResult, path: Path, root: Path) -> str:
233233
rel = path.relative_to(root).as_posix()
234234
lang = pr.language if pr.language != "unknown" else ""
235235
header = f"### `{rel}`\n"
236-
236+
237237
source = pr.raw_source
238238
if path.name in ENTRYPOINT_FILENAMES:
239239
lines = source.split("\n")

src/codectx/graph/builder.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -154,11 +154,11 @@ def detect_call_paths(self, max_depth: int = 5) -> list[list[Path]]:
154154
for entry in entries:
155155
current = entry
156156
path = [current]
157-
157+
158158
idx = self.path_to_idx.get(current)
159159
if idx is None:
160160
continue
161-
161+
162162
visited = {idx}
163163

164164
for _ in range(max_depth - 1):

0 commit comments

Comments (0)