Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 22 additions & 4 deletions node/flatpak_node_generator/cache.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import hashlib
import os
import re
import tempfile
Expand Down Expand Up @@ -152,17 +153,34 @@ class FilesystemBucketRef(Cache.BucketRef):
def __init__(self, key: str, cache_root: Path) -> None:
    """Create a filesystem-backed bucket for *key* rooted at *cache_root*.

    Two candidate on-disk locations are computed: the current one (SHA-256
    of the key, keeping filenames short and filesystem-safe) and the legacy
    one (the old escaped-key naming), kept so existing cache entries can
    still be found and migrated.
    """
    super().__init__(key)
    self._cache_root = cache_root
    # Primary location: hashed filename.
    self._cache_path = cache_root / self._hash_key(key)
    # Pre-hash location used by older versions of the cache.
    self._legacy_cache_path = cache_root / FilesystemBasedCache._escape_key(key)

@staticmethod
def _hash_key(key: str) -> str:
    """Return a filesystem-safe name for *key*: its SHA-256 hex digest."""
    digest = hashlib.sha256(key.encode('utf-8'))
    return digest.hexdigest()

self._cache_path = self._cache_root / FilesystemBasedCache._escape_key(key)
def _migrate_cache_path(self) -> None:
    """Best-effort move of a legacy escaped-name cache file to the hashed path.

    Does nothing when the hashed file already exists or when no legacy file
    is present. A failed rename is silently ignored; the legacy file then
    remains readable at its old location.
    """
    if self._cache_path.exists() or not self._legacy_cache_path.exists():
        return
    try:
        self._legacy_cache_path.rename(self._cache_path)
    except OSError:
        # Leave the legacy file where it is; migration is opportunistic.
        pass

def open_read(self) -> Cache.BucketReader | None:
    """Open this bucket for reading, preferring the hashed cache path.

    First attempts to migrate any legacy (escaped-name) cache file to the
    hashed path, then opens the hashed path. If the hashed file is missing
    (e.g. because the migration rename failed), falls back to reading the
    legacy path directly.

    Returns:
        A reader over the cached bytes, or ``None`` if no entry exists
        under either name.
    """
    self._migrate_cache_path()

    try:
        fp = self._cache_path.open('rb')
    except FileNotFoundError:
        # BUG FIX: previously this returned None here, which made the
        # legacy-path fallback below unreachable. Fall through instead so a
        # failed migration still lets the entry be read from its old name.
        pass
    else:
        return FilesystemBasedCache.FilesystemBucketReader(fp)

    try:
        fp = self._legacy_cache_path.open('rb')
    except FileNotFoundError:
        return None

    return FilesystemBasedCache.FilesystemBucketReader(fp)

def open_write(self) -> Cache.BucketWriter:
target = self._cache_path
Expand Down
97 changes: 97 additions & 0 deletions node/tests/test_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import hashlib
from pathlib import Path

from pytest import MonkeyPatch

from flatpak_node_generator.cache import Cache, FilesystemBasedCache


def test_cache_uses_hashed_filename(tmp_path: Path) -> None:
    """Writing a bucket creates exactly one file, named by the key's SHA-256."""
    cache = FilesystemBasedCache(tmp_path)
    Cache.instance = cache

    key = 'remote-url-metadata:size:https://example.com/very/long/url'
    with cache.get(key).open_write() as writer:
        writer.write(b'123')

    entries = [p.name for p in tmp_path.iterdir()]
    expected = hashlib.sha256(key.encode('utf-8')).hexdigest()
    assert entries == [expected]
    # A SHA-256 hex digest is always 64 characters.
    assert len(entries[0]) == 64


def test_cache_migrates_legacy_file(tmp_path: Path) -> None:
    """Reading a bucket renames a legacy escaped-name file to the hashed name."""
    cache = FilesystemBasedCache(tmp_path)
    Cache.instance = cache

    key = 'remote-url-metadata:size:https://example.com/legacy'
    legacy_path = tmp_path / FilesystemBasedCache._escape_key(key)
    legacy_path.write_bytes(b'legacy-data')

    reader = cache.get(key).open_read()
    assert reader is not None
    try:
        assert reader.read_all() == b'legacy-data'
    finally:
        reader.close()

    # Only the migrated (hashed) file should remain on disk.
    hashed = hashlib.sha256(key.encode('utf-8')).hexdigest()
    assert [p.name for p in tmp_path.iterdir()] == [hashed]


def test_cache_fallback_if_migration_fails(
    tmp_path: Path, monkeypatch: MonkeyPatch
) -> None:
    """When renaming the legacy file fails, reads still succeed from the old path."""
    cache = FilesystemBasedCache(tmp_path)
    Cache.instance = cache

    key = 'remote-url-metadata:size:https://example.com/fallback'
    legacy_path = tmp_path / FilesystemBasedCache._escape_key(key)
    legacy_path.write_bytes(b'fallback-data')

    def broken_rename(self: Path, target: Path) -> None:
        raise OSError('rename failed')

    # Force every Path.rename to fail so migration cannot happen.
    monkeypatch.setattr(Path, 'rename', broken_rename)

    reader = cache.get(key).open_read()
    assert reader is not None
    try:
        assert reader.read_all() == b'fallback-data'
    finally:
        reader.close()

    # The legacy file stays put and no hashed file was created.
    assert legacy_path.exists()
    hashed = hashlib.sha256(key.encode('utf-8')).hexdigest()
    assert not (tmp_path / hashed).exists()


def test_cache_never_creates_escaped_filename(tmp_path: Path) -> None:
    """Fresh writes land only under the hashed name, never the escaped one."""
    cache = FilesystemBasedCache(tmp_path)
    Cache.instance = cache

    key = 'remote-url-metadata:size:https://example.com/test'
    with cache.get(key).open_write() as writer:
        writer.write(b'data')

    hashed = hashlib.sha256(key.encode('utf-8')).hexdigest()
    assert (tmp_path / hashed).exists()
    assert not (tmp_path / FilesystemBasedCache._escape_key(key)).exists()