From f3f1f8199aa79564d9ddd768ebeac483ac995024 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 14:50:06 +0300 Subject: [PATCH 01/17] Add TEA client exception hierarchy and Pydantic models - Introduced a new `exceptions.py` file defining a hierarchy of exceptions for the TEA client, including specific error types for connection, authentication, and validation issues. - Created a new `models.py` file containing Pydantic data models for TEA API objects, including enums for identifiers, checksums, and artifact types. - Added unit tests for the new exception classes and Pydantic models to ensure proper functionality and validation. Updated dependencies in `pyproject.toml` to include `httpx` and `pydantic`, and added `respx` for testing HTTP requests. Updated `uv.lock` to reflect new package versions. --- libtea/exceptions.py | 54 ++++ libtea/models.py | 98 +++++++ pyproject.toml | 6 +- tests/test_exceptions.py | 75 +++++ tests/test_models.py | 175 ++++++++++++ uv.lock | 593 ++++++++++++--------------------------- 6 files changed, 584 insertions(+), 417 deletions(-) create mode 100644 libtea/exceptions.py create mode 100644 libtea/models.py create mode 100644 tests/test_exceptions.py create mode 100644 tests/test_models.py diff --git a/libtea/exceptions.py b/libtea/exceptions.py new file mode 100644 index 0000000..1342660 --- /dev/null +++ b/libtea/exceptions.py @@ -0,0 +1,54 @@ +"""Exception hierarchy for the TEA client library.""" + + +class TeaError(Exception): + """Base exception for all TEA client errors.""" + + +class TeaConnectionError(TeaError): + """Network or connection failure.""" + + +class TeaAuthenticationError(TeaError): + """HTTP 401 or 403 response.""" + + +class TeaNotFoundError(TeaError): + """HTTP 404 response.""" + + def __init__(self, message: str, *, error_type: str | None = None): + super().__init__(message) + self.error_type = error_type + + +class TeaRequestError(TeaError): + """HTTP 400 or other client error.""" + + +class 
TeaServerError(TeaError): + """HTTP 5xx response.""" + + +class TeaDiscoveryError(TeaError): + """Discovery-specific failure (bad TEI, no .well-known, no compatible endpoint).""" + + +class TeaChecksumError(TeaError): + """Checksum verification failure on artifact download.""" + + def __init__( + self, + message: str, + *, + algorithm: str | None = None, + expected: str | None = None, + actual: str | None = None, + ): + super().__init__(message) + self.algorithm = algorithm + self.expected = expected + self.actual = actual + + +class TeaValidationError(TeaError): + """Malformed server response that fails Pydantic validation.""" diff --git a/libtea/models.py b/libtea/models.py new file mode 100644 index 0000000..451447b --- /dev/null +++ b/libtea/models.py @@ -0,0 +1,98 @@ +"""Pydantic data models for TEA API objects.""" + +from enum import StrEnum + +from pydantic import BaseModel, ConfigDict, field_validator +from pydantic.alias_generators import to_camel + + +class _TeaModel(BaseModel): + """Base model with camelCase alias support.""" + + model_config = ConfigDict( + alias_generator=to_camel, + populate_by_name=True, + ) + + +# --- Enums --- + + +class IdentifierType(StrEnum): + CPE = "CPE" + TEI = "TEI" + PURL = "PURL" + + +class ChecksumAlgorithm(StrEnum): + MD5 = "MD5" + SHA_1 = "SHA-1" + SHA_256 = "SHA-256" + SHA_384 = "SHA-384" + SHA_512 = "SHA-512" + SHA3_256 = "SHA3-256" + SHA3_384 = "SHA3-384" + SHA3_512 = "SHA3-512" + BLAKE2B_256 = "BLAKE2b-256" + BLAKE2B_384 = "BLAKE2b-384" + BLAKE2B_512 = "BLAKE2b-512" + BLAKE3 = "BLAKE3" + + +class ArtifactType(StrEnum): + ATTESTATION = "ATTESTATION" + BOM = "BOM" + BUILD_META = "BUILD_META" + CERTIFICATION = "CERTIFICATION" + FORMULATION = "FORMULATION" + LICENSE = "LICENSE" + RELEASE_NOTES = "RELEASE_NOTES" + SECURITY_TXT = "SECURITY_TXT" + THREAT_MODEL = "THREAT_MODEL" + VULNERABILITIES = "VULNERABILITIES" + OTHER = "OTHER" + + +class CollectionBelongsTo(StrEnum): + COMPONENT_RELEASE = "COMPONENT_RELEASE" + 
PRODUCT_RELEASE = "PRODUCT_RELEASE" + + +class CollectionUpdateReasonType(StrEnum): + INITIAL_RELEASE = "INITIAL_RELEASE" + VEX_UPDATED = "VEX_UPDATED" + ARTIFACT_UPDATED = "ARTIFACT_UPDATED" + ARTIFACT_ADDED = "ARTIFACT_ADDED" + ARTIFACT_REMOVED = "ARTIFACT_REMOVED" + + +class ErrorType(StrEnum): + OBJECT_UNKNOWN = "OBJECT_UNKNOWN" + OBJECT_NOT_SHAREABLE = "OBJECT_NOT_SHAREABLE" + + +# --- Shared types --- + + +class Identifier(_TeaModel): + id_type: IdentifierType + id_value: str + + +class Checksum(_TeaModel): + alg_type: ChecksumAlgorithm + alg_value: str + + @field_validator("alg_type", mode="before") + @classmethod + def normalize_alg_type(cls, v: str) -> str: + """Normalize underscore form (SHA_256) to hyphen form (SHA-256). + + Uses member-name lookup instead of blind replace to handle + BLAKE2b casing correctly (BLAKE2B_256 -> BLAKE2b-256). + """ + if isinstance(v, str) and v not in {e.value for e in ChecksumAlgorithm}: + mapped = {e.name: e.value for e in ChecksumAlgorithm}.get(v) + if mapped is not None: + return mapped + return v diff --git a/pyproject.toml b/pyproject.toml index 84b8a55..086c261 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,10 @@ classifiers = [ "Topic :: Security", "Topic :: Software Development :: Libraries :: Python Modules", ] -dependencies = [] +dependencies = [ + "httpx>=0.27.0,<1", + "pydantic>=2.1.0,<3", +] [project.urls] Homepage = "https://github.com/sbomify/py-libtea" @@ -34,6 +37,7 @@ dev = [ "pytest-cov>=4.1.0,<5", "ruff>=0.12.0,<0.13", "pre-commit>=4.2.0,<5", + "respx>=0.22.0,<1", ] [tool.hatch.build.targets.wheel] diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py new file mode 100644 index 0000000..1d50f0c --- /dev/null +++ b/tests/test_exceptions.py @@ -0,0 +1,75 @@ +import pytest + +from libtea.exceptions import ( + TeaAuthenticationError, + TeaChecksumError, + TeaConnectionError, + TeaDiscoveryError, + TeaError, + TeaNotFoundError, + TeaRequestError, + TeaServerError, + 
TeaValidationError, +) + + +def test_tea_error_inherits_exception(): + assert issubclass(TeaError, Exception) + + +def test_tea_error_is_base(): + assert issubclass(TeaConnectionError, TeaError) + assert issubclass(TeaAuthenticationError, TeaError) + assert issubclass(TeaNotFoundError, TeaError) + assert issubclass(TeaRequestError, TeaError) + assert issubclass(TeaServerError, TeaError) + assert issubclass(TeaDiscoveryError, TeaError) + assert issubclass(TeaChecksumError, TeaError) + assert issubclass(TeaValidationError, TeaError) + + +def test_tea_error_message(): + err = TeaError("something went wrong") + assert str(err) == "something went wrong" + + +def test_tea_not_found_with_error_type(): + err = TeaNotFoundError("not found", error_type="OBJECT_UNKNOWN") + assert err.error_type == "OBJECT_UNKNOWN" + assert "not found" in str(err) + + +def test_tea_not_found_default_error_type(): + err = TeaNotFoundError("not found") + assert err.error_type is None + + +def test_tea_checksum_error_fields(): + err = TeaChecksumError("mismatch", algorithm="SHA-256", expected="abc", actual="def") + assert err.algorithm == "SHA-256" + assert err.expected == "abc" + assert err.actual == "def" + + +def test_tea_checksum_error_default_fields(): + err = TeaChecksumError("mismatch") + assert err.algorithm is None + assert err.expected is None + assert err.actual is None + + +@pytest.mark.parametrize( + "exc_class", + [ + TeaConnectionError, + TeaAuthenticationError, + TeaRequestError, + TeaServerError, + TeaDiscoveryError, + TeaValidationError, + ], +) +def test_simple_subclass_raise_and_catch(exc_class): + with pytest.raises(TeaError) as exc_info: + raise exc_class("test message") + assert str(exc_info.value) == "test message" diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 0000000..3e03f72 --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,175 @@ +import pytest +from pydantic import ValidationError + +from libtea.models import ( + ArtifactType, + 
Checksum, + ChecksumAlgorithm, + CollectionBelongsTo, + CollectionUpdateReasonType, + ErrorType, + Identifier, + IdentifierType, +) + + +class TestEnums: + def test_identifier_type_values(self): + assert IdentifierType.CPE == "CPE" + assert IdentifierType.TEI == "TEI" + assert IdentifierType.PURL == "PURL" + + def test_checksum_algorithm_values(self): + assert ChecksumAlgorithm.SHA_256 == "SHA-256" + assert ChecksumAlgorithm.SHA3_512 == "SHA3-512" + assert ChecksumAlgorithm.BLAKE3 == "BLAKE3" + + def test_artifact_type_values(self): + assert ArtifactType.BOM == "BOM" + assert ArtifactType.VULNERABILITIES == "VULNERABILITIES" + assert ArtifactType.OTHER == "OTHER" + + def test_collection_belongs_to(self): + assert CollectionBelongsTo.COMPONENT_RELEASE == "COMPONENT_RELEASE" + assert CollectionBelongsTo.PRODUCT_RELEASE == "PRODUCT_RELEASE" + + def test_collection_update_reason_type(self): + assert CollectionUpdateReasonType.INITIAL_RELEASE == "INITIAL_RELEASE" + assert CollectionUpdateReasonType.VEX_UPDATED == "VEX_UPDATED" + + +class TestSharedTypes: + def test_identifier_from_json(self): + data = {"idType": "PURL", "idValue": "pkg:maven/org.apache/log4j"} + ident = Identifier.model_validate(data) + assert ident.id_type == IdentifierType.PURL + assert ident.id_value == "pkg:maven/org.apache/log4j" + + def test_identifier_to_json(self): + ident = Identifier(id_type=IdentifierType.CPE, id_value="cpe:2.3:a:apache:log4j") + data = ident.model_dump(by_alias=True) + assert data == {"idType": "CPE", "idValue": "cpe:2.3:a:apache:log4j"} + + def test_checksum_from_json(self): + data = {"algType": "SHA-256", "algValue": "abcdef1234567890"} + cs = Checksum.model_validate(data) + assert cs.alg_type == ChecksumAlgorithm.SHA_256 + assert cs.alg_value == "abcdef1234567890" + + def test_checksum_underscore_normalization(self): + """Servers may use SHA_256 (underscore) instead of SHA-256 (hyphen).""" + data = {"algType": "SHA_256", "algValue": "abcdef1234567890"} + cs = 
Checksum.model_validate(data) + assert cs.alg_type == ChecksumAlgorithm.SHA_256 + + def test_enum_is_strenum(self): + assert isinstance(IdentifierType.CPE, str) + assert isinstance(ChecksumAlgorithm.SHA_256, str) + assert isinstance(ArtifactType.BOM, str) + + def test_checksum_to_json(self): + cs = Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="abcdef1234567890") + data = cs.model_dump(by_alias=True) + assert data == {"algType": "SHA-256", "algValue": "abcdef1234567890"} + + def test_populate_by_name(self): + ident = Identifier.model_validate({"id_type": "TEI", "id_value": "tei:example"}) + assert ident.id_type == IdentifierType.TEI + + def test_extra_fields_ignored(self): + cs = Checksum.model_validate({"algType": "SHA-256", "algValue": "deadbeef", "extra": "ignored"}) + assert not hasattr(cs, "extra") + + +class TestChecksumNormalization: + @pytest.mark.parametrize( + "raw, expected_member", + [ + ("SHA_1", ChecksumAlgorithm.SHA_1), + ("SHA_256", ChecksumAlgorithm.SHA_256), + ("SHA_384", ChecksumAlgorithm.SHA_384), + ("SHA_512", ChecksumAlgorithm.SHA_512), + ("SHA3_256", ChecksumAlgorithm.SHA3_256), + ("SHA3_384", ChecksumAlgorithm.SHA3_384), + ("SHA3_512", ChecksumAlgorithm.SHA3_512), + ("BLAKE2B_256", ChecksumAlgorithm.BLAKE2B_256), + ("BLAKE2B_384", ChecksumAlgorithm.BLAKE2B_384), + ("BLAKE2B_512", ChecksumAlgorithm.BLAKE2B_512), + ], + ) + def test_underscore_to_value(self, raw, expected_member): + cs = Checksum.model_validate({"algType": raw, "algValue": "aabbcc"}) + assert cs.alg_type == expected_member + + def test_valid_values_pass_through(self): + for member in ChecksumAlgorithm: + cs = Checksum.model_validate({"algType": member.value, "algValue": "aabbcc"}) + assert cs.alg_type == member + + +class TestValidationErrors: + def test_checksum_rejects_unknown_algorithm(self): + with pytest.raises(ValidationError): + Checksum.model_validate({"algType": "CRC32", "algValue": "aabbcc"}) + + def test_identifier_rejects_unknown_type(self): + with 
pytest.raises(ValidationError): + Identifier.model_validate({"idType": "SPDXID", "idValue": "some-value"}) + + def test_checksum_rejects_missing_alg_value(self): + with pytest.raises(ValidationError): + Checksum.model_validate({"algType": "SHA-256"}) + + def test_identifier_rejects_missing_id_value(self): + with pytest.raises(ValidationError): + Identifier.model_validate({"idType": "PURL"}) + + +class TestEnumCompleteness: + def test_checksum_algorithm_all_members(self): + expected = { + "MD5", + "SHA-1", + "SHA-256", + "SHA-384", + "SHA-512", + "SHA3-256", + "SHA3-384", + "SHA3-512", + "BLAKE2b-256", + "BLAKE2b-384", + "BLAKE2b-512", + "BLAKE3", + } + assert {e.value for e in ChecksumAlgorithm} == expected + + def test_artifact_type_all_members(self): + expected = { + "ATTESTATION", + "BOM", + "BUILD_META", + "CERTIFICATION", + "FORMULATION", + "LICENSE", + "RELEASE_NOTES", + "SECURITY_TXT", + "THREAT_MODEL", + "VULNERABILITIES", + "OTHER", + } + assert {e.value for e in ArtifactType} == expected + + def test_collection_update_reason_all_members(self): + expected = { + "INITIAL_RELEASE", + "VEX_UPDATED", + "ARTIFACT_UPDATED", + "ARTIFACT_ADDED", + "ARTIFACT_REMOVED", + } + assert {e.value for e in CollectionUpdateReasonType} == expected + + def test_error_type_values(self): + assert ErrorType.OBJECT_UNKNOWN == "OBJECT_UNKNOWN" + assert ErrorType.OBJECT_NOT_SHAREABLE == "OBJECT_NOT_SHAREABLE" + assert isinstance(ErrorType.OBJECT_UNKNOWN, str) diff --git a/uv.lock b/uv.lock index b526a26..35a2d6f 100644 --- a/uv.lock +++ b/uv.lock @@ -3,56 +3,34 @@ revision = 3 requires-python = ">=3.11" [[package]] -name = "backports-tarfile" -version = "1.2.0" +name = "annotated-types" +version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = 
"sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] [[package]] -name = "certifi" -version = "2026.1.4" +name = "anyio" +version = "4.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, ] [[package]] -name = "cffi" -version = "2.0.0" +name = "certifi" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pycparser", marker = "implementation_name != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, - { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, - { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, - { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, - { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, - { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, - { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, - { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, - { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, - { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, - { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, - { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, - { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, - { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, - { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, - { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, - { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, - { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, - { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, 
upload-time = "2025-09-08T23:23:27.873Z" }, - { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, - { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, - { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, - { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] @@ -64,79 +42,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, ] -[[package]] -name = 
"charset-normalizer" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, - { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, - { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, - { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, - { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, - { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, - { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, - { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, - { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, 
upload-time = "2025-10-14T04:40:44.547Z" }, - { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, - { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, - { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, - { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, - { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, - { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, - { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, - { url = 
"https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, - { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, 
- { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, - { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, - { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, - { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, - { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, - { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, - { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, - { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, - { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, - { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, - { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, - { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, - { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, - { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, - { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, - { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, - { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, - { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, - { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, - { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", 
size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, - { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, - { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, -] - [[package]] name = "colorama" version = "0.4.6" @@ -251,84 +156,58 @@ toml = [ ] [[package]] -name = "cryptography" -version = "46.0.5" +name = "distlib" +version = "0.4.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, - { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, - { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, - { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, - { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, - { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, - { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, - { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, - { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, - { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, - { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, - { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, - { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, - { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, - { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, - { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, - { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, - { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, - { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, - { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] [[package]] -name = "distlib" -version = "0.4.0" +name = "filelock" +version = "3.24.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, + { url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" }, ] [[package]] -name = "docutils" -version = "0.22.4" +name = "h11" 
+version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/b6/03bb70946330e88ffec97aefd3ea75ba575cb2e762061e0e62a213befee8/docutils-0.22.4.tar.gz", hash = "sha256:4db53b1fde9abecbb74d91230d32ab626d94f6badfc575d6db9194a49df29968", size = 2291750, upload-time = "2025-12-18T19:00:26.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = "sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" }, + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] [[package]] -name = "filelock" -version = "3.24.3" +name = "httpcore" +version = "1.0.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = 
"2025-04-24T22:06:22.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, ] [[package]] -name = "id" -version = "1.6.1" +name = "httpx" +version = "0.28.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "urllib3" }, + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/04/c2156091427636080787aac190019dc64096e56a23b7364d3c1764ee3a06/id-1.6.1.tar.gz", hash = "sha256:d0732d624fb46fd4e7bc4e5152f00214450953b9e772c182c1c22964def1a069", size = 18088, upload-time = "2026-02-04T16:19:41.26Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/77/de194443bf38daed9452139e960c632b0ef9f9a5dd9ce605fdf18ca9f1b1/id-1.6.1-py3-none-any.whl", hash = "sha256:f5ec41ed2629a508f5d0988eda142e190c9c6da971100612c4de9ad9f9b237ca", size = 14689, upload-time = "2026-02-04T16:19:40.051Z" }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 
73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] [[package]] @@ -349,18 +228,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] -[[package]] -name = "importlib-metadata" -version = "8.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "zipp" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, -] - [[package]] name = "iniconfig" version = "2.3.0" @@ -370,156 +237,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] -[[package]] -name = "jaraco-classes" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "more-itertools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } -wheels = [ - { 
url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, -] - -[[package]] -name = "jaraco-context" -version = "6.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cb/9c/a788f5bb29c61e456b8ee52ce76dbdd32fd72cd73dd67bc95f42c7a8d13c/jaraco_context-6.1.0.tar.gz", hash = "sha256:129a341b0a85a7db7879e22acd66902fda67882db771754574338898b2d5d86f", size = 15850, upload-time = "2026-01-13T02:53:53.847Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/48/aa685dbf1024c7bd82bede569e3a85f82c32fd3d79ba5fea578f0159571a/jaraco_context-6.1.0-py3-none-any.whl", hash = "sha256:a43b5ed85815223d0d3cfdb6d7ca0d2bc8946f28f30b6f3216bda070f68badda", size = 7065, upload-time = "2026-01-13T02:53:53.031Z" }, -] - -[[package]] -name = "jaraco-functools" -version = "4.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "more-itertools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0f/27/056e0638a86749374d6f57d0b0db39f29509cce9313cf91bdc0ac4d91084/jaraco_functools-4.4.0.tar.gz", hash = "sha256:da21933b0417b89515562656547a77b4931f98176eb173644c0d35032a33d6bb", size = 19943, upload-time = "2025-12-21T09:29:43.6Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/c4/813bb09f0985cb21e959f21f2464169eca882656849adf727ac7bb7e1767/jaraco_functools-4.4.0-py3-none-any.whl", hash = "sha256:9eec1e36f45c818d9bf307c8948eb03b2b56cd44087b3cdc989abca1f20b9176", size = 10481, upload-time = "2025-12-21T09:29:42.27Z" }, -] - -[[package]] -name = "jeepney" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, -] - -[[package]] -name = "keyring" -version = "25.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, - { name = "jaraco-classes" }, - { name = "jaraco-context" }, - { name = "jaraco-functools" }, - { name = "jeepney", marker = "sys_platform == 'linux'" }, - { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, - { name = "secretstorage", marker = "sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/43/4b/674af6ef2f97d56f0ab5153bf0bfa28ccb6c3ed4d1babf4305449668807b/keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b", size = 63516, upload-time = "2025-11-16T16:26:09.482Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, -] - [[package]] name = "libtea" version = "0.1.0" source = { editable = "." 
} +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, +] [package.dev-dependencies] dev = [ { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-cov" }, + { name = "respx" }, { name = "ruff" }, - { name = "twine" }, ] [package.metadata] +requires-dist = [ + { name = "httpx", specifier = ">=0.27.0,<1" }, + { name = "pydantic", specifier = ">=2.1.0,<3" }, +] [package.metadata.requires-dev] dev = [ { name = "pre-commit", specifier = ">=4.2.0,<5" }, { name = "pytest", specifier = ">=8.0.0,<9" }, { name = "pytest-cov", specifier = ">=4.1.0,<5" }, + { name = "respx", specifier = ">=0.22.0,<1" }, { name = "ruff", specifier = ">=0.12.0,<0.13" }, - { name = "twine", specifier = ">=6.2.0" }, -] - -[[package]] -name = "markdown-it-py" -version = "4.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, -] - -[[package]] -name = "more-itertools" -version = "10.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, -] - -[[package]] -name = "nh3" -version = "0.3.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/37/ab55eb2b05e334ff9a1ad52c556ace1f9c20a3f63613a165d384d5387657/nh3-0.3.3.tar.gz", hash = "sha256:185ed41b88c910b9ca8edc89ca3b4be688a12cb9de129d84befa2f74a0039fee", size = 18968, upload-time = "2026-02-14T09:35:15.664Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/a4/834f0ebd80844ce67e1bdb011d6f844f61cdb4c1d7cdc56a982bc054cc00/nh3-0.3.3-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:21b058cd20d9f0919421a820a2843fdb5e1749c0bf57a6247ab8f4ba6723c9fc", size = 1428680, upload-time = "2026-02-14T09:34:33.015Z" }, - { url = "https://files.pythonhosted.org/packages/7f/1a/a7d72e750f74c6b71befbeebc4489579fe783466889d41f32e34acde0b6b/nh3-0.3.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f4400a73c2a62859e769f9d36d1b5a7a5c65c4179d1dddd2f6f3095b2db0cbfc", size = 799003, upload-time = "2026-02-14T09:34:35.108Z" }, - { url = "https://files.pythonhosted.org/packages/58/d5/089eb6d65da139dc2223b83b2627e00872eccb5e1afdf5b1d76eb6ad3fcc/nh3-0.3.3-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ef87f8e916321a88b45f2d597f29bd56e560ed4568a50f0f1305afab86b7189", size = 846818, upload-time = "2026-02-14T09:34:37Z" }, - { url = "https://files.pythonhosted.org/packages/9b/c6/44a0b65fc7b213a3a725f041ef986534b100e58cd1a2e00f0fd3c9603893/nh3-0.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a446eae598987f49ee97ac2f18eafcce4e62e7574bd1eb23782e4702e54e217d", size = 1012537, upload-time = "2026-02-14T09:34:38.515Z" }, - { url = "https://files.pythonhosted.org/packages/94/3a/91bcfcc0a61b286b8b25d39e288b9c0ba91c3290d402867d1cd705169844/nh3-0.3.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:0d5eb734a78ac364af1797fef718340a373f626a9ff6b4fb0b4badf7927e7b81", size = 1095435, upload-time = "2026-02-14T09:34:40.022Z" }, - { url = "https://files.pythonhosted.org/packages/fd/fd/4617a19d80cf9f958e65724ff5e97bc2f76f2f4c5194c740016606c87bd1/nh3-0.3.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:92a958e6f6d0100e025a5686aafd67e3c98eac67495728f8bb64fbeb3e474493", size = 1056344, upload-time = "2026-02-14T09:34:41.469Z" }, - { url = "https://files.pythonhosted.org/packages/bd/7d/5bcbbc56e71b7dda7ef1d6008098da9c5426d6334137ef32bb2b9c496984/nh3-0.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9ed40cf8449a59a03aa465114fedce1ff7ac52561688811d047917cc878b19ca", size = 1034533, upload-time = "2026-02-14T09:34:43.313Z" }, - { url = "https://files.pythonhosted.org/packages/3f/9c/054eff8a59a8b23b37f0f4ac84cdd688ee84cf5251664c0e14e5d30a8a67/nh3-0.3.3-cp314-cp314t-win32.whl", hash = "sha256:b50c3770299fb2a7c1113751501e8878d525d15160a4c05194d7fe62b758aad8", size = 608305, upload-time = "2026-02-14T09:34:44.622Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/b0/64667b8d522c7b859717a02b1a66ba03b529ca1df623964e598af8db1ed5/nh3-0.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:21a63ccb18ddad3f784bb775955839b8b80e347e597726f01e43ca1abcc5c808", size = 620633, upload-time = "2026-02-14T09:34:46.069Z" }, - { url = "https://files.pythonhosted.org/packages/91/b5/ae9909e4ddfd86ee076c4d6d62ba69e9b31061da9d2f722936c52df8d556/nh3-0.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f508ddd4e2433fdcb78c790fc2d24e3a349ba775e5fa904af89891321d4844a3", size = 607027, upload-time = "2026-02-14T09:34:47.91Z" }, - { url = "https://files.pythonhosted.org/packages/13/3e/aef8cf8e0419b530c95e96ae93a5078e9b36c1e6613eeb1df03a80d5194e/nh3-0.3.3-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e8ee96156f7dfc6e30ecda650e480c5ae0a7d38f0c6fafc3c1c655e2500421d9", size = 1448640, upload-time = "2026-02-14T09:34:49.316Z" }, - { url = "https://files.pythonhosted.org/packages/ca/43/d2011a4f6c0272cb122eeff40062ee06bb2b6e57eabc3a5e057df0d582df/nh3-0.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45fe0d6a607264910daec30360c8a3b5b1500fd832d21b2da608256287bcb92d", size = 839405, upload-time = "2026-02-14T09:34:50.779Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f3/965048510c1caf2a34ed04411a46a04a06eb05563cd06f1aa57b71eb2bc8/nh3-0.3.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5bc1d4b30ba1ba896669d944b6003630592665974bd11a3dc2f661bde92798a7", size = 825849, upload-time = "2026-02-14T09:34:52.622Z" }, - { url = "https://files.pythonhosted.org/packages/78/99/b4bbc6ad16329d8db2c2c320423f00b549ca3b129c2b2f9136be2606dbb0/nh3-0.3.3-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f433a2dd66545aad4a720ad1b2150edcdca75bfff6f4e6f378ade1ec138d5e77", size = 1068303, upload-time = "2026-02-14T09:34:54.179Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/34/3420d97065aab1b35f3e93ce9c96c8ebd423ce86fe84dee3126790421a2a/nh3-0.3.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52e973cb742e95b9ae1b35822ce23992428750f4b46b619fe86eba4205255b30", size = 1029316, upload-time = "2026-02-14T09:34:56.186Z" }, - { url = "https://files.pythonhosted.org/packages/f1/9a/99eda757b14e596fdb2ca5f599a849d9554181aa899274d0d183faef4493/nh3-0.3.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c730617bdc15d7092dcc0469dc2826b914c8f874996d105b4bc3842a41c1cd9", size = 919944, upload-time = "2026-02-14T09:34:57.886Z" }, - { url = "https://files.pythonhosted.org/packages/6f/84/c0dc75c7fb596135f999e59a410d9f45bdabb989f1cb911f0016d22b747b/nh3-0.3.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e98fa3dbfd54e25487e36ba500bc29bca3a4cab4ffba18cfb1a35a2d02624297", size = 811461, upload-time = "2026-02-14T09:34:59.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/ec/b1bf57cab6230eec910e4863528dc51dcf21b57aaf7c88ee9190d62c9185/nh3-0.3.3-cp38-abi3-manylinux_2_31_riscv64.whl", hash = "sha256:3a62b8ae7c235481715055222e54c682422d0495a5c73326807d4e44c5d14691", size = 840360, upload-time = "2026-02-14T09:35:01.444Z" }, - { url = "https://files.pythonhosted.org/packages/37/5e/326ae34e904dde09af1de51219a611ae914111f0970f2f111f4f0188f57e/nh3-0.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc305a2264868ec8fa16548296f803d8fd9c1fa66cd28b88b605b1bd06667c0b", size = 859872, upload-time = "2026-02-14T09:35:03.348Z" }, - { url = "https://files.pythonhosted.org/packages/09/38/7eba529ce17ab4d3790205da37deabb4cb6edcba15f27b8562e467f2fc97/nh3-0.3.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90126a834c18af03bfd6ff9a027bfa6bbf0e238527bc780a24de6bd7cc1041e2", size = 1023550, upload-time = "2026-02-14T09:35:04.829Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/a2/556fdecd37c3681b1edee2cf795a6799c6ed0a5551b2822636960d7e7651/nh3-0.3.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:24769a428e9e971e4ccfb24628f83aaa7dc3c8b41b130c8ddc1835fa1c924489", size = 1105212, upload-time = "2026-02-14T09:35:06.821Z" }, - { url = "https://files.pythonhosted.org/packages/dd/e3/5db0b0ad663234967d83702277094687baf7c498831a2d3ad3451c11770f/nh3-0.3.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:b7a18ee057761e455d58b9d31445c3e4b2594cff4ddb84d2e331c011ef46f462", size = 1069970, upload-time = "2026-02-14T09:35:08.504Z" }, - { url = "https://files.pythonhosted.org/packages/79/b2/2ea21b79c6e869581ce5f51549b6e185c4762233591455bf2a326fb07f3b/nh3-0.3.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a4b2c1f3e6f3cbe7048e17f4fefad3f8d3e14cc0fd08fb8599e0d5653f6b181", size = 1047588, upload-time = "2026-02-14T09:35:09.911Z" }, - { url = "https://files.pythonhosted.org/packages/e2/92/2e434619e658c806d9c096eed2cdff9a883084299b7b19a3f0824eb8e63d/nh3-0.3.3-cp38-abi3-win32.whl", hash = "sha256:e974850b131fdffa75e7ad8e0d9c7a855b96227b093417fdf1bd61656e530f37", size = 616179, upload-time = "2026-02-14T09:35:11.366Z" }, - { url = "https://files.pythonhosted.org/packages/73/88/1ce287ef8649dc51365b5094bd3713b76454838140a32ab4f8349973883c/nh3-0.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:2efd17c0355d04d39e6d79122b42662277ac10a17ea48831d90b46e5ef7e4fc0", size = 631159, upload-time = "2026-02-14T09:35:12.77Z" }, - { url = "https://files.pythonhosted.org/packages/31/f1/b4835dbde4fb06f29db89db027576d6014081cd278d9b6751facc3e69e43/nh3-0.3.3-cp38-abi3-win_arm64.whl", hash = "sha256:b838e619f483531483d26d889438e53a880510e832d2aafe73f93b7b1ac2bce2", size = 616645, upload-time = "2026-02-14T09:35:14.062Z" }, ] [[package]] @@ -575,12 +323,115 @@ wheels = [ ] [[package]] -name = "pycparser" -version = "3.0" +name = "pydantic" +version = "2.12.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url 
= "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, 
upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { 
url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { 
url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, ] [[package]] @@ -621,15 +472,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a", size = 21949, upload-time = "2023-05-24T18:44:54.079Z" }, ] -[[package]] -name = "pywin32-ctypes" -version = "0.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, -] - [[package]] name = "pyyaml" version = "6.0.3" @@ -686,66 +528,15 @@ wheels = [ ] [[package]] -name = "readme-renderer" -version = "44.0" +name = "respx" +version = 
"0.22.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "docutils" }, - { name = "nh3" }, - { name = "pygments" }, + { name = "httpx" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/a9/104ec9234c8448c4379768221ea6df01260cd6c2ce13182d4eac531c8342/readme_renderer-44.0.tar.gz", hash = "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1", size = 32056, upload-time = "2024-07-08T15:00:57.805Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/67/921ec3024056483db83953ae8e48079ad62b92db7880013ca77632921dd0/readme_renderer-44.0-py3-none-any.whl", hash = "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151", size = 13310, upload-time = "2024-07-08T15:00:56.577Z" }, -] - -[[package]] -name = "requests" -version = "2.32.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, -] - -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, -] - -[[package]] -name = "rfc3986" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/85/40/1520d68bfa07ab5a6f065a186815fb6610c86fe957bc065754e47f7b0840/rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c", size = 49026, upload-time = "2022-01-10T00:52:30.832Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/9a/9afaade874b2fa6c752c36f1548f718b5b83af81ed9b76628329dab81c1b/rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd", size = 31326, upload-time = "2022-01-10T00:52:29.594Z" }, -] - -[[package]] -name = "rich" -version = "14.3.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, + { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, ] [[package]] @@ -774,19 +565,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" }, ] -[[package]] -name = "secretstorage" -version = "3.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cryptography" }, - { name = "jeepney" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554, upload-time = "2025-11-23T19:02:51.545Z" }, -] - [[package]] name = "tomli" version = "2.4.0" @@ -842,32 +620,24 @@ wheels = [ ] [[package]] -name = "twine" -version = "6.2.0" +name = "typing-extensions" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "id" }, - { name = "keyring", marker = 
"platform_machine != 'ppc64le' and platform_machine != 's390x'" }, - { name = "packaging" }, - { name = "readme-renderer" }, - { name = "requests" }, - { name = "requests-toolbelt" }, - { name = "rfc3986" }, - { name = "rich" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e0/a8/949edebe3a82774c1ec34f637f5dd82d1cf22c25e963b7d63771083bbee5/twine-6.2.0.tar.gz", hash = "sha256:e5ed0d2fd70c9959770dce51c8f39c8945c574e18173a7b81802dab51b4b75cf", size = 172262, upload-time = "2025-09-04T15:43:17.255Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/7a/882d99539b19b1490cac5d77c67338d126e4122c8276bf640e411650c830/twine-6.2.0-py3-none-any.whl", hash = "sha256:418ebf08ccda9a8caaebe414433b0ba5e25eb5e4a927667122fbe8f829f985d8", size = 42727, upload-time = "2025-09-04T15:43:15.994Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] -name = "urllib3" -version = "2.6.3" +name = "typing-inspection" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] @@ -883,12 +653,3 @@ sdist = { url = "https://files.pythonhosted.org/packages/ed/54/809199edc537dbace wheels = [ { url = "https://files.pythonhosted.org/packages/f7/b4/8268da45f26f4fe84f6eae80a6ca1485ffb490a926afecff75fc48f61979/virtualenv-20.39.0-py3-none-any.whl", hash = "sha256:44888bba3775990a152ea1f73f8e5f566d49f11bbd1de61d426fd7732770043e", size = 5839121, upload-time = "2026-02-23T18:09:11.173Z" }, ] - -[[package]] -name = "zipp" -version = "3.23.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, -] 
From 60628529d7faaa5bd53a732f85cf045c14b44880 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 14:57:42 +0300 Subject: [PATCH 02/17] Add internal HTTP client and discovery functionality - Introduced `TeaHttpClient` for handling TEA API requests with error handling for connection, authentication, and server issues. - Added `discovery.py` for TEI parsing, fetching `.well-known/tea` documents, and selecting compatible endpoints. - Expanded Pydantic models in `models.py` to include new data structures for TEA API objects. - Implemented unit tests for HTTP client and discovery functions to ensure reliability and correctness. --- libtea/_http.py | 88 ++++++++++++++++++++++++ libtea/discovery.py | 53 ++++++++++++++ libtea/models.py | 138 +++++++++++++++++++++++++++++++++++++ tests/conftest.py | 17 +++++ tests/test_discovery.py | 149 ++++++++++++++++++++++++++++++++++++++++ tests/test_http.py | 105 ++++++++++++++++++++++++++++ tests/test_models.py | 147 +++++++++++++++++++++++++++++++++++++++ 7 files changed, 697 insertions(+) create mode 100644 libtea/_http.py create mode 100644 libtea/discovery.py create mode 100644 tests/conftest.py create mode 100644 tests/test_discovery.py create mode 100644 tests/test_http.py diff --git a/libtea/_http.py b/libtea/_http.py new file mode 100644 index 0000000..d0245c2 --- /dev/null +++ b/libtea/_http.py @@ -0,0 +1,88 @@ +"""Internal HTTP client wrapping httpx with TEA error handling.""" + +from pathlib import Path +from typing import Any + +import httpx + +from libtea.exceptions import ( + TeaAuthenticationError, + TeaConnectionError, + TeaNotFoundError, + TeaRequestError, + TeaServerError, +) + + +class TeaHttpClient: + """Low-level HTTP client for TEA API requests.""" + + def __init__( + self, + base_url: str, + *, + token: str | None = None, + timeout: float = 30.0, + ): + headers = {"user-agent": "py-libtea"} + if token: + headers["authorization"] = f"Bearer {token}" + + self._timeout = timeout + self._client = 
httpx.Client( + base_url=base_url, + headers=headers, + timeout=timeout, + ) + + def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: + """Send GET request and return parsed JSON.""" + try: + response = self._client.get(path, params=params) + except httpx.TransportError as exc: + raise TeaConnectionError(str(exc)) from exc + + self._raise_for_status(response) + return response.json() + + def download(self, url: str, dest: Path) -> None: + """Download a file from an absolute URL to dest path.""" + try: + with self._client.stream("GET", url) as response: + self._raise_for_status(response) + with open(dest, "wb") as f: + for chunk in response.iter_bytes(chunk_size=8192): + f.write(chunk) + except httpx.TransportError as exc: + raise TeaConnectionError(str(exc)) from exc + + def close(self) -> None: + self._client.close() + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + @staticmethod + def _raise_for_status(response: httpx.Response) -> None: + """Map HTTP status codes to typed exceptions.""" + status = response.status_code + if 200 <= status < 300: + return + + if status in (401, 403): + raise TeaAuthenticationError(f"Authentication failed: HTTP {status}") + elif status == 404: + error_type = None + try: + body = response.json() + error_type = body.get("error") + except Exception: + pass + raise TeaNotFoundError(f"Not found: HTTP {status}", error_type=error_type) + elif 400 <= status < 500: + raise TeaRequestError(f"Client error: HTTP {status}") + elif status >= 500: + raise TeaServerError(f"Server error: HTTP {status}") diff --git a/libtea/discovery.py b/libtea/discovery.py new file mode 100644 index 0000000..702b15b --- /dev/null +++ b/libtea/discovery.py @@ -0,0 +1,53 @@ +"""TEI parsing, .well-known/tea fetching, and endpoint selection.""" + +import httpx + +from libtea.exceptions import TeaDiscoveryError +from libtea.models import TeaEndpoint, TeaWellKnown + + +def parse_tei(tei: str) -> tuple[str, 
str, str]: + """Parse a TEI URN into (type, domain, identifier). + + TEI format: urn:tei::: + The identifier may contain colons (e.g. hash type). + """ + parts = tei.split(":") + if len(parts) < 5 or parts[0] != "urn" or parts[1] != "tei": + raise TeaDiscoveryError(f"Invalid TEI: {tei!r}. Expected format: urn:tei:::") + + tei_type = parts[2] + domain = parts[3] + identifier = ":".join(parts[4:]) + return tei_type, domain, identifier + + +def fetch_well_known(domain: str, *, timeout: float = 10.0) -> TeaWellKnown: + """Fetch and parse the .well-known/tea document from a domain via HTTPS.""" + url = f"https://{domain}/.well-known/tea" + try: + response = httpx.get(url, timeout=timeout, follow_redirects=True) + response.raise_for_status() + except httpx.HTTPStatusError as exc: + raise TeaDiscoveryError(f"Failed to fetch {url}: HTTP {exc.response.status_code}") from exc + except httpx.TransportError as exc: + raise TeaDiscoveryError(f"Failed to connect to {url}: {exc}") from exc + + return TeaWellKnown.model_validate(response.json()) + + +def select_endpoint(well_known: TeaWellKnown, supported_version: str) -> TeaEndpoint: + """Select the best endpoint that supports the given version. + + Prefers endpoints with the requested version, then by highest priority. + """ + candidates = [ep for ep in well_known.endpoints if supported_version in ep.versions] + + if not candidates: + available = {v for ep in well_known.endpoints for v in ep.versions} + raise TeaDiscoveryError( + f"No compatible endpoint found for version {supported_version!r}. 
Available versions: {sorted(available)}" + ) + + candidates.sort(key=lambda ep: ep.priority if ep.priority is not None else 1.0, reverse=True) + return candidates[0] diff --git a/libtea/models.py b/libtea/models.py index 451447b..1e7686c 100644 --- a/libtea/models.py +++ b/libtea/models.py @@ -1,6 +1,8 @@ """Pydantic data models for TEA API objects.""" +from datetime import datetime from enum import StrEnum +from typing import Literal from pydantic import BaseModel, ConfigDict, field_validator from pydantic.alias_generators import to_camel @@ -96,3 +98,139 @@ def normalize_alg_type(cls, v: str) -> str: if mapped is not None: return mapped return v + + +# --- Domain objects --- + + +class ReleaseDistribution(_TeaModel): + distribution_type: str + description: str | None = None + identifiers: list[Identifier] = [] + url: str | None = None + signature_url: str | None = None + checksums: list[Checksum] = [] + + +class ArtifactFormat(_TeaModel): + media_type: str + description: str | None = None + url: str + signature_url: str | None = None + checksums: list[Checksum] = [] + + +class Artifact(_TeaModel): + uuid: str + name: str + type: ArtifactType + distribution_types: list[str] | None = None + formats: list[ArtifactFormat] = [] + + +class CollectionUpdateReason(_TeaModel): + type: CollectionUpdateReasonType + comment: str | None = None + + +class Collection(_TeaModel): + uuid: str + version: int + date: datetime | None = None + belongs_to: CollectionBelongsTo | None = None + update_reason: CollectionUpdateReason | None = None + artifacts: list[Artifact] = [] + + +class ComponentRef(_TeaModel): + uuid: str + release: str | None = None + + +class Component(_TeaModel): + uuid: str + name: str + identifiers: list[Identifier] + + +class Release(_TeaModel): + uuid: str + component: str | None = None + component_name: str | None = None + version: str + created_date: datetime + release_date: datetime | None = None + pre_release: bool | None = None + identifiers: 
list[Identifier] = [] + distributions: list[ReleaseDistribution] = [] + + +class ComponentReleaseWithCollection(_TeaModel): + release: Release + latest_collection: Collection + + +class Product(_TeaModel): + uuid: str + name: str + identifiers: list[Identifier] + + +class ProductRelease(_TeaModel): + uuid: str + product: str | None = None + product_name: str | None = None + version: str + created_date: datetime + release_date: datetime | None = None + pre_release: bool | None = None + identifiers: list[Identifier] = [] + components: list[ComponentRef] + + +class ErrorResponse(_TeaModel): + error: ErrorType + + +# --- Pagination --- + + +class PaginatedProductResponse(_TeaModel): + timestamp: datetime + page_start_index: int + page_size: int + total_results: int + results: list[Product] = [] + + +class PaginatedProductReleaseResponse(_TeaModel): + timestamp: datetime + page_start_index: int + page_size: int + total_results: int + results: list[ProductRelease] = [] + + +# --- Discovery types --- + + +class TeaEndpoint(_TeaModel): + url: str + versions: list[str] + priority: float | None = None + + +class TeaWellKnown(_TeaModel): + schema_version: Literal[1] + endpoints: list[TeaEndpoint] + + +class TeaServerInfo(_TeaModel): + root_url: str + versions: list[str] + priority: float | None = None + + +class DiscoveryInfo(_TeaModel): + product_release_uuid: str + servers: list[TeaServerInfo] diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..49ef8ba --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,17 @@ +import pytest + +from libtea._http import TeaHttpClient + +BASE_URL = "https://api.example.com/tea/v1" + + +@pytest.fixture +def base_url(): + return BASE_URL + + +@pytest.fixture +def http_client(): + c = TeaHttpClient(base_url=BASE_URL) + yield c + c.close() diff --git a/tests/test_discovery.py b/tests/test_discovery.py new file mode 100644 index 0000000..5b8823d --- /dev/null +++ b/tests/test_discovery.py @@ -0,0 +1,149 @@ +import 
httpx +import pytest +import respx + +from libtea.discovery import fetch_well_known, parse_tei, select_endpoint +from libtea.exceptions import TeaDiscoveryError +from libtea.models import TeaEndpoint, TeaWellKnown + + +class TestParseTei: + def test_uuid_tei(self): + tei = "urn:tei:uuid:products.example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b1" + tei_type, domain, identifier = parse_tei(tei) + assert tei_type == "uuid" + assert domain == "products.example.com" + assert identifier == "d4d9f54a-abcf-11ee-ac79-1a52914d44b1" + + def test_purl_tei(self): + tei = "urn:tei:purl:cyclonedx.org:pkg:pypi/cyclonedx-python-lib@8.4.0" + tei_type, domain, identifier = parse_tei(tei) + assert tei_type == "purl" + assert domain == "cyclonedx.org" + assert identifier == "pkg:pypi/cyclonedx-python-lib@8.4.0" + + def test_hash_tei(self): + tei = "urn:tei:hash:cyclonedx.org:SHA256:fd44efd601f651c8865acf0dfeacb0df19a2b50ec69ead0262096fd2f67197b9" + tei_type, domain, identifier = parse_tei(tei) + assert tei_type == "hash" + assert domain == "cyclonedx.org" + assert identifier == "SHA256:fd44efd601f651c8865acf0dfeacb0df19a2b50ec69ead0262096fd2f67197b9" + + def test_invalid_tei_no_urn_prefix(self): + with pytest.raises(TeaDiscoveryError, match="Invalid TEI"): + parse_tei("not-a-tei") + + def test_invalid_tei_wrong_prefix(self): + with pytest.raises(TeaDiscoveryError, match="Invalid TEI"): + parse_tei("urn:other:uuid:example.com:123") + + def test_invalid_tei_too_few_parts(self): + with pytest.raises(TeaDiscoveryError, match="Invalid TEI"): + parse_tei("urn:tei:uuid") + + def test_invalid_tei_empty_string(self): + with pytest.raises(TeaDiscoveryError, match="Invalid TEI"): + parse_tei("") + + def test_tei_with_slash_in_purl_identifier(self): + tei = "urn:tei:purl:cyclonedx.org:pkg:maven/org.apache/log4j@2.24.3" + tei_type, domain, identifier = parse_tei(tei) + assert tei_type == "purl" + assert domain == "cyclonedx.org" + assert identifier == "pkg:maven/org.apache/log4j@2.24.3" + + +class 
TestFetchWellKnown: + @respx.mock + def test_fetch_well_known_success(self): + respx.get("https://example.com/.well-known/tea").mock( + return_value=httpx.Response( + 200, + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["1.0.0"]}], + }, + ) + ) + wk = fetch_well_known("example.com") + assert wk.schema_version == 1 + assert len(wk.endpoints) == 1 + + @respx.mock + def test_fetch_well_known_404_raises_discovery_error(self): + respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(404)) + with pytest.raises(TeaDiscoveryError, match="HTTP 404"): + fetch_well_known("example.com") + + @respx.mock + def test_fetch_well_known_connection_error(self): + respx.get("https://example.com/.well-known/tea").mock(side_effect=httpx.ConnectError("refused")) + with pytest.raises(TeaDiscoveryError, match="Failed to connect"): + fetch_well_known("example.com") + + @respx.mock + def test_fetch_well_known_500_raises_discovery_error(self): + respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(500)) + with pytest.raises(TeaDiscoveryError): + fetch_well_known("example.com") + + +class TestSelectEndpoint: + def _make_well_known(self, endpoints: list[dict]) -> TeaWellKnown: + return TeaWellKnown( + schema_version=1, + endpoints=[TeaEndpoint(**ep) for ep in endpoints], + ) + + def test_selects_matching_version(self): + wk = self._make_well_known( + [ + {"url": "https://api.example.com", "versions": ["1.0.0"]}, + ] + ) + ep = select_endpoint(wk, "1.0.0") + assert ep.url == "https://api.example.com" + + def test_selects_highest_priority(self): + wk = self._make_well_known( + [ + {"url": "https://low.example.com", "versions": ["1.0.0"], "priority": 0.5}, + {"url": "https://high.example.com", "versions": ["1.0.0"], "priority": 1.0}, + ] + ) + ep = select_endpoint(wk, "1.0.0") + assert ep.url == "https://high.example.com" + + def test_no_matching_version_raises(self): + wk = self._make_well_known( 
+ [ + {"url": "https://api.example.com", "versions": ["2.0.0"]}, + ] + ) + with pytest.raises(TeaDiscoveryError, match="No compatible endpoint"): + select_endpoint(wk, "1.0.0") + + def test_prefers_highest_matching_version(self): + wk = self._make_well_known( + [ + {"url": "https://old.example.com", "versions": ["0.1.0"]}, + {"url": "https://new.example.com", "versions": ["0.1.0", "1.0.0"]}, + ] + ) + ep = select_endpoint(wk, "1.0.0") + assert ep.url == "https://new.example.com" + + def test_empty_endpoints_raises(self): + wk = TeaWellKnown(schema_version=1, endpoints=[]) + with pytest.raises(TeaDiscoveryError, match="No compatible endpoint"): + select_endpoint(wk, "1.0.0") + + def test_none_priority_vs_explicit_priority(self): + wk = self._make_well_known( + [ + {"url": "https://none-priority.example.com", "versions": ["1.0.0"]}, + {"url": "https://high-priority.example.com", "versions": ["1.0.0"], "priority": 2.0}, + ] + ) + ep = select_endpoint(wk, "1.0.0") + assert ep.url == "https://high-priority.example.com" diff --git a/tests/test_http.py b/tests/test_http.py new file mode 100644 index 0000000..49bae32 --- /dev/null +++ b/tests/test_http.py @@ -0,0 +1,105 @@ +import httpx +import pytest +import respx + +from libtea._http import TeaHttpClient +from libtea.exceptions import ( + TeaAuthenticationError, + TeaConnectionError, + TeaNotFoundError, + TeaRequestError, + TeaServerError, +) + + +class TestTeaHttpClient: + @respx.mock + def test_get_json_success(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock( + return_value=httpx.Response(200, json={"uuid": "abc", "name": "Test"}) + ) + data = http_client.get_json("/product/abc") + assert data == {"uuid": "abc", "name": "Test"} + + @respx.mock + def test_get_json_with_bearer_token(self, base_url): + route = respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(200, json={"uuid": "abc"})) + client = TeaHttpClient(base_url=base_url, token="my-token") + 
client.get_json("/product/abc") + assert route.calls[0].request.headers["authorization"] == "Bearer my-token" + client.close() + + @respx.mock + def test_404_raises_not_found_with_error_type(self, http_client, base_url): + respx.get(f"{base_url}/product/missing").mock( + return_value=httpx.Response(404, json={"error": "OBJECT_UNKNOWN"}) + ) + with pytest.raises(TeaNotFoundError) as exc_info: + http_client.get_json("/product/missing") + assert exc_info.value.error_type == "OBJECT_UNKNOWN" + + @respx.mock + def test_404_with_object_not_shareable(self, http_client, base_url): + respx.get(f"{base_url}/product/restricted").mock( + return_value=httpx.Response(404, json={"error": "OBJECT_NOT_SHAREABLE"}) + ) + with pytest.raises(TeaNotFoundError) as exc_info: + http_client.get_json("/product/restricted") + assert exc_info.value.error_type == "OBJECT_NOT_SHAREABLE" + + @respx.mock + def test_404_with_non_json_body(self, http_client, base_url): + respx.get(f"{base_url}/product/missing").mock(return_value=httpx.Response(404, content=b"Not Found")) + with pytest.raises(TeaNotFoundError) as exc_info: + http_client.get_json("/product/missing") + assert exc_info.value.error_type is None + + @respx.mock + def test_401_raises_auth_error(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(401)) + with pytest.raises(TeaAuthenticationError): + http_client.get_json("/product/abc") + + @respx.mock + def test_403_raises_auth_error(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(403)) + with pytest.raises(TeaAuthenticationError): + http_client.get_json("/product/abc") + + @respx.mock + def test_400_raises_request_error(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(400)) + with pytest.raises(TeaRequestError): + http_client.get_json("/product/abc") + + @respx.mock + def test_500_raises_server_error(self, http_client, base_url): + 
respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(500)) + with pytest.raises(TeaServerError): + http_client.get_json("/product/abc") + + @respx.mock + def test_502_raises_server_error(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(502)) + with pytest.raises(TeaServerError): + http_client.get_json("/product/abc") + + @respx.mock + def test_connection_error(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock(side_effect=httpx.ConnectError("refused")) + with pytest.raises(TeaConnectionError): + http_client.get_json("/product/abc") + + @respx.mock + def test_timeout_raises_connection_error(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock(side_effect=httpx.TimeoutException("timed out")) + with pytest.raises(TeaConnectionError, match="timed out"): + http_client.get_json("/product/abc") + + @respx.mock + def test_stream_to_file(self, http_client, base_url, tmp_path): + content = b"file content here" + respx.get("https://artifacts.example.com/sbom.xml").mock(return_value=httpx.Response(200, content=content)) + dest = tmp_path / "sbom.xml" + http_client.download(url="https://artifacts.example.com/sbom.xml", dest=dest) + assert dest.read_bytes() == content diff --git a/tests/test_models.py b/tests/test_models.py index 3e03f72..f8ee571 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -2,14 +2,19 @@ from pydantic import ValidationError from libtea.models import ( + ArtifactFormat, ArtifactType, Checksum, ChecksumAlgorithm, + Collection, CollectionBelongsTo, CollectionUpdateReasonType, ErrorType, Identifier, IdentifierType, + PaginatedProductResponse, + Product, + Release, ) @@ -173,3 +178,145 @@ def test_error_type_values(self): assert ErrorType.OBJECT_UNKNOWN == "OBJECT_UNKNOWN" assert ErrorType.OBJECT_NOT_SHAREABLE == "OBJECT_NOT_SHAREABLE" assert isinstance(ErrorType.OBJECT_UNKNOWN, str) + + +class TestProduct: + def 
test_product_from_json(self): + data = { + "uuid": "09e8c73b-ac45-4475-acac-33e6a7314e6d", + "name": "Apache Log4j 2", + "identifiers": [ + {"idType": "CPE", "idValue": "cpe:2.3:a:apache:log4j"}, + {"idType": "PURL", "idValue": "pkg:maven/org.apache.logging.log4j/log4j-api"}, + ], + } + product = Product.model_validate(data) + assert product.uuid == "09e8c73b-ac45-4475-acac-33e6a7314e6d" + assert product.name == "Apache Log4j 2" + assert len(product.identifiers) == 2 + assert product.identifiers[0].id_type == IdentifierType.CPE + + +class TestRelease: + def test_release_from_json(self): + data = { + "uuid": "605d0ecb-1057-40e4-9abf-c400b10f0345", + "version": "11.0.7", + "createdDate": "2025-05-07T18:08:00Z", + "releaseDate": "2025-05-12T18:08:00Z", + "identifiers": [{"idType": "PURL", "idValue": "pkg:maven/org.apache.tomcat/tomcat@11.0.7"}], + "distributions": [ + { + "distributionType": "zip", + "description": "Core binary distribution", + "checksums": [ + { + "algType": "SHA-256", + "algValue": "9da736a1cdd27231e70187cbc67398d29ca0b714f885e7032da9f1fb247693c1", + } + ], + "url": "https://repo.maven.apache.org/maven2/tomcat-11.0.7.zip", + } + ], + } + release = Release.model_validate(data) + assert release.version == "11.0.7" + assert release.distributions[0].distribution_type == "zip" + assert release.distributions[0].checksums[0].alg_type == ChecksumAlgorithm.SHA_256 + + +class TestCollection: + def test_collection_from_json(self): + data = { + "uuid": "4c72fe22-9d83-4c2f-8eba-d6db484f32c8", + "version": 3, + "date": "2024-12-13T00:00:00Z", + "belongsTo": "COMPONENT_RELEASE", + "updateReason": {"type": "ARTIFACT_UPDATED", "comment": "VDR file updated"}, + "artifacts": [ + { + "uuid": "1cb47b95-8bf8-3bad-a5a4-0d54d86e10ce", + "name": "Build SBOM", + "type": "BOM", + "formats": [ + { + "mediaType": "application/vnd.cyclonedx+xml", + "description": "CycloneDX SBOM (XML)", + "url": "https://repo.maven.apache.org/maven2/log4j-core-2.24.3-cyclonedx.xml", + 
"checksums": [{"algType": "SHA-1", "algValue": "5a7d4caef63c5c5ccdf07c39337323529eb5a770"}], + } + ], + } + ], + } + collection = Collection.model_validate(data) + assert collection.version == 3 + assert collection.belongs_to == CollectionBelongsTo.COMPONENT_RELEASE + assert collection.update_reason.type == CollectionUpdateReasonType.ARTIFACT_UPDATED + assert collection.artifacts[0].type == ArtifactType.BOM + assert collection.artifacts[0].formats[0].media_type == "application/vnd.cyclonedx+xml" + + +class TestOptionalFields: + def test_release_minimal_fields(self): + data = { + "uuid": "r-1", + "version": "1.0.0", + "createdDate": "2024-01-01T00:00:00Z", + } + release = Release.model_validate(data) + assert release.release_date is None + assert release.pre_release is None + assert release.component is None + assert release.distributions == [] + assert release.identifiers == [] + + def test_collection_minimal_fields(self): + data = {"uuid": "c-1", "version": 1} + collection = Collection.model_validate(data) + assert collection.date is None + assert collection.belongs_to is None + assert collection.update_reason is None + assert collection.artifacts == [] + + def test_artifact_format_minimal_fields(self): + data = { + "mediaType": "application/json", + "url": "https://example.com/sbom.json", + } + fmt = ArtifactFormat.model_validate(data) + assert fmt.description is None + assert fmt.signature_url is None + assert fmt.checksums == [] + + def test_paginated_product_response_empty_results(self): + data = { + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 0, + "pageSize": 100, + "totalResults": 0, + "results": [], + } + resp = PaginatedProductResponse.model_validate(data) + assert resp.total_results == 0 + assert resp.results == [] + + +class TestPaginatedResponse: + def test_paginated_product_response(self): + data = { + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 0, + "pageSize": 100, + "totalResults": 1, + "results": [ + { + "uuid": 
"09e8c73b-ac45-4475-acac-33e6a7314e6d", + "name": "Apache Log4j 2", + "identifiers": [{"idType": "PURL", "idValue": "pkg:maven/org.apache.logging.log4j/log4j-api"}], + } + ], + } + resp = PaginatedProductResponse.model_validate(data) + assert resp.total_results == 1 + assert resp.results[0].name == "Apache Log4j 2" From 409c9b35627e1aa6cbc696ea32d144368062dc28 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 15:05:15 +0300 Subject: [PATCH 03/17] Add TeaClient and download functionality with checksum verification - Introduced `TeaClient` as the main entry point for interacting with the TEA API, providing methods for product and component retrieval, as well as artifact downloads. - Implemented `download_artifact` method in `TeaClient` to support downloading files with optional checksum verification. - Enhanced `TeaHttpClient` with `download_with_hashes` method to compute checksums during file downloads. - Added unit tests for `TeaClient` and download functionality, ensuring correct behavior for various scenarios including checksum validation and error handling. - Updated test fixtures to accommodate the new client structure. 
--- libtea/__init__.py | 3 + libtea/_http.py | 58 +++++++- libtea/client.py | 187 ++++++++++++++++++++++++ tests/conftest.py | 8 ++ tests/test_client.py | 312 +++++++++++++++++++++++++++++++++++++++++ tests/test_download.py | 68 +++++++++ tests/test_http.py | 2 +- tests/test_version.py | 18 +++ 8 files changed, 648 insertions(+), 8 deletions(-) create mode 100644 libtea/client.py create mode 100644 tests/test_client.py create mode 100644 tests/test_download.py diff --git a/libtea/__init__.py b/libtea/__init__.py index c9f8f84..ddcf4c2 100644 --- a/libtea/__init__.py +++ b/libtea/__init__.py @@ -2,4 +2,7 @@ from importlib.metadata import version +from libtea.client import TeaClient + __version__ = version("libtea") +__all__ = ["TeaClient", "__version__"] diff --git a/libtea/_http.py b/libtea/_http.py index d0245c2..825af78 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -1,5 +1,6 @@ """Internal HTTP client wrapping httpx with TEA error handling.""" +import hashlib from pathlib import Path from typing import Any @@ -45,17 +46,60 @@ def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: self._raise_for_status(response) return response.json() - def download(self, url: str, dest: Path) -> None: - """Download a file from an absolute URL to dest path.""" + def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | None = None) -> dict[str, str]: + """Download a file and compute checksums on-the-fly. Returns {algorithm: hex_digest}. + + Uses a separate unauthenticated httpx client so that the bearer token + is not leaked to third-party artifact hosts (CDNs, Maven Central, etc.). 
+ """ + from libtea.exceptions import TeaChecksumError + + hashers: dict[str, hashlib._Hash] = {} + if algorithms: + alg_map = { + "MD5": "md5", + "SHA-1": "sha1", + "SHA-256": "sha256", + "SHA-384": "sha384", + "SHA-512": "sha512", + "SHA3-256": "sha3_256", + "SHA3-384": "sha3_384", + "SHA3-512": "sha3_512", + "BLAKE2b-256": "blake2b", + "BLAKE2b-384": "blake2b", + "BLAKE2b-512": "blake2b", + } + blake2b_sizes = {"BLAKE2b-256": 32, "BLAKE2b-384": 48, "BLAKE2b-512": 64} + for alg in algorithms: + if alg == "BLAKE3": + raise TeaChecksumError( + "BLAKE3 is not supported by Python's hashlib. " + "Install the 'blake3' package or use a different algorithm.", + algorithm="BLAKE3", + ) + hashlib_name = alg_map.get(alg) + if hashlib_name == "blake2b": + hashers[alg] = hashlib.blake2b(digest_size=blake2b_sizes[alg]) + elif hashlib_name: + hashers[alg] = hashlib.new(hashlib_name) + try: - with self._client.stream("GET", url) as response: - self._raise_for_status(response) - with open(dest, "wb") as f: - for chunk in response.iter_bytes(chunk_size=8192): - f.write(chunk) + with httpx.Client( + headers={"user-agent": "py-libtea"}, + timeout=self._timeout, + ) as download_client: + with download_client.stream("GET", url) as response: + self._raise_for_status(response) + with open(dest, "wb") as f: + for chunk in response.iter_bytes(chunk_size=8192): + f.write(chunk) + for h in hashers.values(): + h.update(chunk) except httpx.TransportError as exc: raise TeaConnectionError(str(exc)) from exc + return {alg: h.hexdigest() for alg, h in hashers.items()} + def close(self) -> None: self._client.close() diff --git a/libtea/client.py b/libtea/client.py new file mode 100644 index 0000000..aea7e8d --- /dev/null +++ b/libtea/client.py @@ -0,0 +1,187 @@ +"""TeaClient - main entry point for the TEA consumer API.""" + +from pathlib import Path +from types import TracebackType +from typing import Self + +from pydantic import ValidationError + +from libtea._http import TeaHttpClient +from 
libtea.discovery import fetch_well_known, select_endpoint +from libtea.exceptions import TeaValidationError +from libtea.models import ( + Artifact, + Checksum, + Collection, + Component, + ComponentReleaseWithCollection, + DiscoveryInfo, + PaginatedProductReleaseResponse, + Product, + ProductRelease, + Release, +) + +TEA_SPEC_VERSION = "0.3.0-beta.2" + + +def _validate(model_cls, data): + """Validate data against a Pydantic model, wrapping errors in TeaValidationError.""" + try: + return model_cls.model_validate(data) + except ValidationError as exc: + raise TeaValidationError(f"Invalid {model_cls.__name__} response: {exc}") from exc + + +def _validate_list(model_cls, data): + """Validate a list of items against a Pydantic model.""" + try: + return [model_cls.model_validate(item) for item in data] + except ValidationError as exc: + raise TeaValidationError(f"Invalid {model_cls.__name__} response: {exc}") from exc + + +class TeaClient: + """Synchronous client for the Transparency Exchange API.""" + + def __init__( + self, + base_url: str, + *, + token: str | None = None, + timeout: float = 30.0, + ): + self._http = TeaHttpClient(base_url=base_url, token=token, timeout=timeout) + + @classmethod + def from_well_known( + cls, + domain: str, + *, + token: str | None = None, + timeout: float = 30.0, + version: str = TEA_SPEC_VERSION, + ) -> "TeaClient": + """Create a client by discovering the TEA endpoint from a domain's .well-known/tea.""" + well_known = fetch_well_known(domain, timeout=timeout) + endpoint = select_endpoint(well_known, version) + base_url = f"{endpoint.url.rstrip('/')}/v{version}" + return cls(base_url=base_url, token=token, timeout=timeout) + + # --- Discovery --- + + def discover(self, tei: str) -> list[DiscoveryInfo]: + # httpx auto-encodes query params — do NOT pre-encode with quote() + data = self._http.get_json("/discovery", params={"tei": tei}) + return _validate_list(DiscoveryInfo, data) + + # --- Products --- + + def get_product(self, uuid: 
str) -> Product: + data = self._http.get_json(f"/product/{uuid}") + return _validate(Product, data) + + def get_product_releases( + self, uuid: str, *, page_offset: int = 0, page_size: int = 100 + ) -> PaginatedProductReleaseResponse: + data = self._http.get_json( + f"/product/{uuid}/releases", + params={"pageOffset": page_offset, "pageSize": page_size}, + ) + return _validate(PaginatedProductReleaseResponse, data) + + # --- Product Releases --- + + def get_product_release(self, uuid: str) -> ProductRelease: + data = self._http.get_json(f"/productRelease/{uuid}") + return _validate(ProductRelease, data) + + def get_product_release_collection_latest(self, uuid: str) -> Collection: + data = self._http.get_json(f"/productRelease/{uuid}/collection/latest") + return _validate(Collection, data) + + def get_product_release_collections(self, uuid: str) -> list[Collection]: + data = self._http.get_json(f"/productRelease/{uuid}/collections") + return _validate_list(Collection, data) + + def get_product_release_collection(self, uuid: str, version: int) -> Collection: + data = self._http.get_json(f"/productRelease/{uuid}/collection/{version}") + return _validate(Collection, data) + + # --- Components --- + + def get_component(self, uuid: str) -> Component: + data = self._http.get_json(f"/component/{uuid}") + return _validate(Component, data) + + def get_component_releases(self, uuid: str) -> list[Release]: + data = self._http.get_json(f"/component/{uuid}/releases") + return _validate_list(Release, data) + + # --- Component Releases --- + + def get_component_release(self, uuid: str) -> ComponentReleaseWithCollection: + data = self._http.get_json(f"/componentRelease/{uuid}") + return _validate(ComponentReleaseWithCollection, data) + + def get_component_release_collection_latest(self, uuid: str) -> Collection: + data = self._http.get_json(f"/componentRelease/{uuid}/collection/latest") + return _validate(Collection, data) + + def get_component_release_collections(self, uuid: str) 
-> list[Collection]: + data = self._http.get_json(f"/componentRelease/{uuid}/collections") + return _validate_list(Collection, data) + + def get_component_release_collection(self, uuid: str, version: int) -> Collection: + data = self._http.get_json(f"/componentRelease/{uuid}/collection/{version}") + return _validate(Collection, data) + + # --- Artifacts --- + + def get_artifact(self, uuid: str) -> Artifact: + data = self._http.get_json(f"/artifact/{uuid}") + return _validate(Artifact, data) + + def download_artifact( + self, + url: str, + dest: Path, + *, + verify_checksums: list[Checksum] | None = None, + ) -> Path: + """Download an artifact file, optionally verifying checksums.""" + from libtea.exceptions import TeaChecksumError + + algorithms = [cs.alg_type.value for cs in verify_checksums] if verify_checksums else None + computed = self._http.download_with_hashes(url, dest, algorithms=algorithms) + + if verify_checksums: + for cs in verify_checksums: + expected = cs.alg_value.lower() + actual = computed.get(cs.alg_type.value, "").lower() + if actual != expected: + dest.unlink(missing_ok=True) + raise TeaChecksumError( + f"{cs.alg_type.value} mismatch: expected {expected}, got {actual}", + algorithm=cs.alg_type.value, + expected=expected, + actual=actual, + ) + + return dest + + # --- Lifecycle --- + + def close(self) -> None: + self._http.close() + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() diff --git a/tests/conftest.py b/tests/conftest.py index 49ef8ba..05cbc56 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ import pytest from libtea._http import TeaHttpClient +from libtea.client import TeaClient BASE_URL = "https://api.example.com/tea/v1" @@ -10,6 +11,13 @@ def base_url(): return BASE_URL +@pytest.fixture +def client(): + c = TeaClient(base_url=BASE_URL) + yield c + c.close() + 
+ @pytest.fixture def http_client(): c = TeaHttpClient(base_url=BASE_URL) diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 0000000..ec52666 --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,312 @@ +import httpx +import pytest +import respx + +from libtea.client import TeaClient +from libtea.models import ( + Artifact, + Collection, + Component, + ComponentReleaseWithCollection, + PaginatedProductReleaseResponse, + Product, + ProductRelease, + Release, +) +from tests.conftest import BASE_URL as BASE + + +class TestProduct: + @respx.mock + def test_get_product(self, client): + respx.get(f"{BASE}/product/abc-123").mock( + return_value=httpx.Response( + 200, + json={ + "uuid": "abc-123", + "name": "Test Product", + "identifiers": [{"idType": "PURL", "idValue": "pkg:npm/test"}], + }, + ) + ) + product = client.get_product("abc-123") + assert isinstance(product, Product) + assert product.name == "Test Product" + + @respx.mock + def test_get_product_releases(self, client): + respx.get(f"{BASE}/product/abc-123/releases").mock( + return_value=httpx.Response( + 200, + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 0, + "pageSize": 100, + "totalResults": 1, + "results": [ + { + "uuid": "rel-1", + "version": "1.0.0", + "createdDate": "2024-01-01T00:00:00Z", + "components": [{"uuid": "comp-1"}], + } + ], + }, + ) + ) + resp = client.get_product_releases("abc-123") + assert isinstance(resp, PaginatedProductReleaseResponse) + assert resp.total_results == 1 + + +class TestProductRelease: + @respx.mock + def test_get_product_release(self, client): + respx.get(f"{BASE}/productRelease/rel-1").mock( + return_value=httpx.Response( + 200, + json={ + "uuid": "rel-1", + "version": "1.0.0", + "createdDate": "2024-01-01T00:00:00Z", + "components": [{"uuid": "comp-1"}], + }, + ) + ) + release = client.get_product_release("rel-1") + assert isinstance(release, ProductRelease) + assert release.version == "1.0.0" + + @respx.mock + def 
test_get_product_release_collection_latest(self, client): + respx.get(f"{BASE}/productRelease/rel-1/collection/latest").mock( + return_value=httpx.Response( + 200, + json={ + "uuid": "rel-1", + "version": 1, + "artifacts": [], + }, + ) + ) + collection = client.get_product_release_collection_latest("rel-1") + assert isinstance(collection, Collection) + + +class TestComponent: + @respx.mock + def test_get_component(self, client): + respx.get(f"{BASE}/component/comp-1").mock( + return_value=httpx.Response( + 200, + json={ + "uuid": "comp-1", + "name": "Test Component", + "identifiers": [], + }, + ) + ) + component = client.get_component("comp-1") + assert isinstance(component, Component) + assert component.name == "Test Component" + + @respx.mock + def test_get_component_releases(self, client): + respx.get(f"{BASE}/component/comp-1/releases").mock( + return_value=httpx.Response( + 200, + json=[ + {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"}, + ], + ) + ) + releases = client.get_component_releases("comp-1") + assert len(releases) == 1 + assert isinstance(releases[0], Release) + + +class TestComponentRelease: + @respx.mock + def test_get_component_release(self, client): + respx.get(f"{BASE}/componentRelease/cr-1").mock( + return_value=httpx.Response( + 200, + json={ + "release": {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"}, + "latestCollection": {"uuid": "cr-1", "version": 1, "artifacts": []}, + }, + ) + ) + result = client.get_component_release("cr-1") + assert isinstance(result, ComponentReleaseWithCollection) + assert result.release.version == "1.0.0" + + @respx.mock + def test_get_component_release_collection_latest(self, client): + respx.get(f"{BASE}/componentRelease/cr-1/collection/latest").mock( + return_value=httpx.Response(200, json={"uuid": "cr-1", "version": 2, "artifacts": []}) + ) + collection = client.get_component_release_collection_latest("cr-1") + assert isinstance(collection, Collection) + 
assert collection.version == 2 + + @respx.mock + def test_get_component_release_collections(self, client): + respx.get(f"{BASE}/componentRelease/cr-1/collections").mock( + return_value=httpx.Response( + 200, + json=[ + {"uuid": "cr-1", "version": 1, "artifacts": []}, + {"uuid": "cr-1", "version": 2, "artifacts": []}, + ], + ) + ) + collections = client.get_component_release_collections("cr-1") + assert len(collections) == 2 + + @respx.mock + def test_get_component_release_collection_by_version(self, client): + respx.get(f"{BASE}/componentRelease/cr-1/collection/3").mock( + return_value=httpx.Response(200, json={"uuid": "cr-1", "version": 3, "artifacts": []}) + ) + collection = client.get_component_release_collection("cr-1", 3) + assert collection.version == 3 + + +class TestArtifact: + @respx.mock + def test_get_artifact(self, client): + respx.get(f"{BASE}/artifact/art-1").mock( + return_value=httpx.Response( + 200, + json={ + "uuid": "art-1", + "name": "SBOM", + "type": "BOM", + "formats": [ + { + "mediaType": "application/json", + "url": "https://example.com/sbom.json", + "checksums": [], + } + ], + }, + ) + ) + artifact = client.get_artifact("art-1") + assert isinstance(artifact, Artifact) + assert artifact.name == "SBOM" + + +class TestDiscovery: + @respx.mock + def test_discover(self, client): + tei = "urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b" + route = respx.get(f"{BASE}/discovery").mock( + return_value=httpx.Response( + 200, + json=[ + { + "productReleaseUuid": "d4d9f54a-abcf-11ee-ac79-1a52914d44b", + "servers": [{"rootUrl": "https://api.example.com", "versions": ["1.0.0"]}], + } + ], + ) + ) + results = client.discover(tei) + assert len(results) == 1 + assert results[0].product_release_uuid == "d4d9f54a-abcf-11ee-ac79-1a52914d44b" + # Verify TEI is NOT double-encoded (httpx auto-encodes params) + request = route.calls[0].request + assert "tei=" in str(request.url) + + @respx.mock + def test_discover_empty_result(self, client): + 
respx.get(f"{BASE}/discovery").mock(return_value=httpx.Response(200, json=[])) + results = client.discover("urn:tei:uuid:example.com:d4d9f54a") + assert results == [] + + +class TestFromWellKnown: + @respx.mock + def test_from_well_known_creates_client(self): + respx.get("https://example.com/.well-known/tea").mock( + return_value=httpx.Response( + 200, + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["0.3.0-beta.2"]}], + }, + ) + ) + client = TeaClient.from_well_known("example.com") + assert client is not None + client.close() + + @respx.mock + def test_from_well_known_no_compatible_version_raises(self): + from libtea.exceptions import TeaDiscoveryError + + respx.get("https://example.com/.well-known/tea").mock( + return_value=httpx.Response( + 200, + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["99.0.0"]}], + }, + ) + ) + with pytest.raises(TeaDiscoveryError): + TeaClient.from_well_known("example.com") + + @respx.mock + def test_from_well_known_passes_token(self): + respx.get("https://example.com/.well-known/tea").mock( + return_value=httpx.Response( + 200, + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["0.3.0-beta.2"]}], + }, + ) + ) + route = respx.get("https://api.example.com/v0.3.0-beta.2/product/abc").mock( + return_value=httpx.Response(200, json={"uuid": "abc", "name": "P", "identifiers": []}) + ) + client = TeaClient.from_well_known("example.com", token="secret") + client.get_product("abc") + assert route.calls[0].request.headers["authorization"] == "Bearer secret" + client.close() + + +class TestPagination: + @respx.mock + def test_get_product_releases_pagination_params(self, client): + route = respx.get(f"{BASE}/product/abc-123/releases").mock( + return_value=httpx.Response( + 200, + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 50, + "pageSize": 25, + "totalResults": 200, + "results": [], + }, + 
) + ) + resp = client.get_product_releases("abc-123", page_offset=50, page_size=25) + request = route.calls[0].request + assert "pageOffset=50" in str(request.url) + assert "pageSize=25" in str(request.url) + assert resp.page_start_index == 50 + + +class TestContextManager: + @respx.mock + def test_client_as_context_manager(self): + respx.get(f"{BASE}/component/c1").mock( + return_value=httpx.Response(200, json={"uuid": "c1", "name": "C1", "identifiers": []}) + ) + with TeaClient(base_url=BASE) as client: + component = client.get_component("c1") + assert component.name == "C1" diff --git a/tests/test_download.py b/tests/test_download.py new file mode 100644 index 0000000..4f9ef46 --- /dev/null +++ b/tests/test_download.py @@ -0,0 +1,68 @@ +import hashlib + +import httpx +import pytest +import respx + +from libtea.exceptions import TeaChecksumError +from libtea.models import Checksum, ChecksumAlgorithm + +ARTIFACT_URL = "https://artifacts.example.com/sbom.json" +ARTIFACT_CONTENT = b'{"bomFormat": "CycloneDX", "specVersion": "1.5"}' + + +class TestDownloadArtifact: + @respx.mock + def test_download_without_checksum(self, client, tmp_path): + respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + dest = tmp_path / "sbom.json" + result = client.download_artifact(ARTIFACT_URL, dest) + assert result == dest + assert dest.read_bytes() == ARTIFACT_CONTENT + + @respx.mock + def test_download_with_valid_checksum(self, client, tmp_path): + respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest() + checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)] + dest = tmp_path / "sbom.json" + result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) + assert result == dest + assert dest.exists() + + @respx.mock + def test_download_with_invalid_checksum_deletes_file(self, client, tmp_path): + 
respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="badhash")] + dest = tmp_path / "sbom.json" + with pytest.raises(TeaChecksumError, match="SHA-256"): + client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) + assert not dest.exists() + + @respx.mock + def test_download_with_multiple_checksums(self, client, tmp_path): + respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest() + sha1 = hashlib.sha1(ARTIFACT_CONTENT).hexdigest() + checksums = [ + Checksum(alg_type=ChecksumAlgorithm.SHA_1, alg_value=sha1), + Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256), + ] + dest = tmp_path / "sbom.json" + result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) + assert result == dest + + @respx.mock + def test_download_checksum_uppercase_hex_accepted(self, client, tmp_path): + respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest().upper() + checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)] + dest = tmp_path / "sbom.json" + result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) + assert result == dest + + def test_download_with_blake3_raises_clear_error(self, client, tmp_path): + checksums = [Checksum(alg_type=ChecksumAlgorithm.BLAKE3, alg_value="somevalue")] + dest = tmp_path / "sbom.json" + with pytest.raises(TeaChecksumError, match="BLAKE3"): + client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) diff --git a/tests/test_http.py b/tests/test_http.py index 49bae32..fb9f165 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -101,5 +101,5 @@ def test_stream_to_file(self, http_client, base_url, tmp_path): content = b"file content here" 
respx.get("https://artifacts.example.com/sbom.xml").mock(return_value=httpx.Response(200, content=content)) dest = tmp_path / "sbom.xml" - http_client.download(url="https://artifacts.example.com/sbom.xml", dest=dest) + http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest) assert dest.read_bytes() == content diff --git a/tests/test_version.py b/tests/test_version.py index 261122f..3534d0e 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -1,5 +1,23 @@ import libtea +from libtea import TeaClient +from libtea.exceptions import TeaError +from libtea.models import Artifact, Collection, Product def test_version(): assert isinstance(libtea.__version__, str) + + +def test_public_api_exports(): + assert hasattr(libtea, "TeaClient") + assert libtea.TeaClient is TeaClient + + +def test_exception_importable(): + assert issubclass(TeaError, Exception) + + +def test_model_importable(): + assert Product is not None + assert Collection is not None + assert Artifact is not None From 0040fa704c115ff606f462a1c0b34c2faa231aee Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 15:42:31 +0300 Subject: [PATCH 04/17] Add GitHub Actions workflows for CI and CodeQL analysis, and configure Dependabot - Created a CI workflow to run tests across multiple Python versions using `uv`. - Added a CodeQL analysis workflow scheduled for weekly runs to enhance security. - Configured Dependabot for automatic updates of GitHub Actions and `uv` dependencies on a weekly basis. 
--- .github/dependabot.yml | 18 ++++++++++++++++++ .github/workflows/ci.yaml | 21 +++++++++++++++++++++ .github/workflows/codeql.yaml | 30 ++++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+) create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/ci.yaml create mode 100644 .github/workflows/codeql.yaml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..af38c43 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,18 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + groups: + github-actions: + patterns: + - "*" + - package-ecosystem: "uv" + directory: "/" + schedule: + interval: "weekly" + groups: + uv: + patterns: + - "*" diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..f202480 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,21 @@ +name: CI + +on: + push: + branches: [master] + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11", "3.12", "3.13"] + steps: + - uses: actions/checkout@v4 + - uses: astral-sh/setup-uv@v5 + - run: uv python install ${{ matrix.python-version }} + - run: uv sync + - run: uv run ruff check . + - run: uv run ruff format --check . 
+ - run: uv run pytest --cov=libtea --cov-report=term-missing diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml new file mode 100644 index 0000000..8e68a06 --- /dev/null +++ b/.github/workflows/codeql.yaml @@ -0,0 +1,30 @@ +name: CodeQL + +on: + push: + branches: [master] + pull_request: + schedule: + - cron: "0 6 * * 1" + +jobs: + analyze: + runs-on: ubuntu-latest + permissions: + security-events: write + strategy: + matrix: + language: [python] + steps: + - uses: actions/checkout@v4 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 From f961fbf86aae41d61d2336750b3f629938d433e0 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 15:48:21 +0300 Subject: [PATCH 05/17] Address PR review: clean up partial downloads, fix checksum verification, update README - Clean up partial files on download failure (transport error or any exception) - Replace hashlib._Hash private type with Any - Raise explicit error when checksum algorithm has no computed digest - Update README with usage examples and error handling docs - Add Python 3.10 to CI matrix --- .github/workflows/ci.yaml | 2 +- README.md | 118 +++++++++++++++++++++++++++++++++----- libtea/_http.py | 6 +- libtea/client.py | 15 ++++- tests/test_download.py | 20 +++++++ tests/test_http.py | 8 +++ 6 files changed, 150 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f202480..ef47b5d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - uses: astral-sh/setup-uv@v5 diff --git a/README.md b/README.md index ac23da2..30b1c2b 
100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # libtea -Python client library for the [Transparency Exchange API (TEA)](https://transparency.exchange/). +Python client library for the [Transparency Exchange API (TEA)](https://transparency.exchange/) v0.3.0-beta.2. > **Status**: Alpha — API is subject to change. @@ -10,25 +10,115 @@ Python client library for the [Transparency Exchange API (TEA)](https://transpar pip install libtea ``` -## Development +## Quick start -This project uses [uv](https://docs.astral.sh/uv/) for dependency management. +```python +from libtea import TeaClient -```bash -# Install dependencies -uv sync +# Connect directly +client = TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") + +# Or auto-discover from a domain's .well-known/tea +client = TeaClient.from_well_known("example.com") + +# With authentication +client = TeaClient.from_well_known("example.com", token="your-bearer-token") +``` + +## Usage + +### Products and releases + +```python +with TeaClient.from_well_known("example.com") as client: + product = client.get_product("product-uuid") + print(product.name, product.identifiers) -# Run tests -uv run pytest + releases = client.get_product_releases("product-uuid", page_size=25) + for release in releases.results: + print(release.version, release.created_date) +``` + +### Components + +```python + component = client.get_component("component-uuid") + releases = client.get_component_releases("component-uuid") + + # Get a component release with its latest collection + cr = client.get_component_release("release-uuid") + print(cr.release.version, len(cr.latest_collection.artifacts)) +``` + +### Collections and artifacts + +```python + collection = client.get_component_release_collection_latest("release-uuid") + for artifact in collection.artifacts: + print(artifact.name, artifact.type) + + # Specific collection version + collection_v3 = client.get_component_release_collection("release-uuid", 3) +``` -# Lint -uv run ruff 
check . +### Downloading artifacts with checksum verification -# Format check -uv run ruff format --check . +```python +from pathlib import Path -# Build -uv build + artifact = client.get_artifact("artifact-uuid") + fmt = artifact.formats[0] + + # Downloads and verifies checksums on-the-fly + client.download_artifact( + fmt.url, + Path("sbom.json"), + verify_checksums=fmt.checksums, + ) +``` + +### Discovery via TEI + +```python + results = client.discover("urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b") + for info in results: + print(info.product_release_uuid, info.servers) +``` + +## Error handling + +All exceptions inherit from `TeaError`: + +```python +from libtea.exceptions import ( + TeaError, # Base exception + TeaConnectionError, # Network failure or timeout + TeaAuthenticationError,# HTTP 401/403 + TeaNotFoundError, # HTTP 404 (has .error_type: "OBJECT_UNKNOWN" or "OBJECT_NOT_SHAREABLE") + TeaRequestError, # HTTP 4xx (other) + TeaServerError, # HTTP 5xx + TeaDiscoveryError, # Invalid TEI, .well-known failure, or no compatible endpoint + TeaChecksumError, # Checksum mismatch (has .algorithm, .expected, .actual) + TeaValidationError, # Malformed server response +) +``` + +## Requirements + +- Python >= 3.11 +- [httpx](https://www.python-httpx.org/) for HTTP +- [Pydantic](https://docs.pydantic.dev/) v2 for data models + +## Development + +This project uses [uv](https://docs.astral.sh/uv/) for dependency management. + +```bash +uv sync # Install dependencies +uv run pytest # Run tests (with coverage) +uv run ruff check . # Lint +uv run ruff format --check . 
# Format check +uv build # Build wheel and sdist ``` ## License diff --git a/libtea/_http.py b/libtea/_http.py index 825af78..76c13d9 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -54,7 +54,7 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non """ from libtea.exceptions import TeaChecksumError - hashers: dict[str, hashlib._Hash] = {} + hashers: dict[str, Any] = {} if algorithms: alg_map = { "MD5": "md5", @@ -96,7 +96,11 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non for h in hashers.values(): h.update(chunk) except httpx.TransportError as exc: + dest.unlink(missing_ok=True) raise TeaConnectionError(str(exc)) from exc + except Exception: + dest.unlink(missing_ok=True) + raise return {alg: h.hexdigest() for alg, h in hashers.items()} diff --git a/libtea/client.py b/libtea/client.py index aea7e8d..6b4a87e 100644 --- a/libtea/client.py +++ b/libtea/client.py @@ -157,13 +157,22 @@ def download_artifact( if verify_checksums: for cs in verify_checksums: + alg_name = cs.alg_type.value expected = cs.alg_value.lower() - actual = computed.get(cs.alg_type.value, "").lower() + if alg_name not in computed: + dest.unlink(missing_ok=True) + raise TeaChecksumError( + f"No computed digest for algorithm: {alg_name}", + algorithm=alg_name, + expected=expected, + actual=None, + ) + actual = computed[alg_name].lower() if actual != expected: dest.unlink(missing_ok=True) raise TeaChecksumError( - f"{cs.alg_type.value} mismatch: expected {expected}, got {actual}", - algorithm=cs.alg_type.value, + f"{alg_name} mismatch: expected {expected}, got {actual}", + algorithm=alg_name, expected=expected, actual=actual, ) diff --git a/tests/test_download.py b/tests/test_download.py index 4f9ef46..5538547 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -66,3 +66,23 @@ def test_download_with_blake3_raises_clear_error(self, client, tmp_path): dest = tmp_path / "sbom.json" with pytest.raises(TeaChecksumError, 
match="BLAKE3"): client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) + + @respx.mock + def test_download_with_unknown_algorithm_raises_clear_error(self, client, tmp_path): + """If an algorithm has no hashlib mapping, verification should raise explicitly.""" + respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + checksums = [Checksum(alg_type=ChecksumAlgorithm.BLAKE3, alg_value="abc123")] + dest = tmp_path / "sbom.json" + # BLAKE3 raises before download, so test with a checksum whose algorithm + # was silently skipped during hashing (simulated by providing BLAKE3 in + # verify_checksums but not in the download algorithms list). + # Instead, we test the path by calling download_artifact with a checksum + # that has an algorithm not in the computed dict. + from unittest.mock import patch + + # Patch download_with_hashes to return empty dict (no algorithms computed) + with patch.object(client._http, "download_with_hashes", return_value={}): + checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="abc123")] + with pytest.raises(TeaChecksumError, match="No computed digest"): + client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) + assert not dest.exists() diff --git a/tests/test_http.py b/tests/test_http.py index fb9f165..c79c628 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -103,3 +103,11 @@ def test_stream_to_file(self, http_client, base_url, tmp_path): dest = tmp_path / "sbom.xml" http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest) assert dest.read_bytes() == content + + @respx.mock + def test_download_cleans_up_partial_file_on_transport_error(self, http_client, tmp_path): + respx.get("https://artifacts.example.com/sbom.xml").mock(side_effect=httpx.ConnectError("refused")) + dest = tmp_path / "sbom.xml" + with pytest.raises(TeaConnectionError): + http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", 
dest=dest) + assert not dest.exists() From 4a27168a2e3cff16dbce6ceb96d058bac4b1960f Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 16:04:59 +0300 Subject: [PATCH 06/17] Address PR review round 2: user-agent, JSON error handling, download robustness - User-agent now follows sbomify pattern: py-libtea/{version} (hello@sbomify.com) - Wrap response.json() in try/except to raise TeaValidationError on non-JSON 2xx - Create parent directories before writing download dest - Drop Python 3.10 from CI matrix (requires-python >= 3.11) --- .github/workflows/ci.yaml | 2 +- libtea/_http.py | 20 +++++++++++++++++--- tests/test_http.py | 25 +++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ef47b5d..f202480 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13"] + python-version: ["3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - uses: astral-sh/setup-uv@v5 diff --git a/libtea/_http.py b/libtea/_http.py index 76c13d9..47b30c7 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -1,6 +1,7 @@ """Internal HTTP client wrapping httpx with TEA error handling.""" import hashlib +from importlib.metadata import version as _pkg_version from pathlib import Path from typing import Any @@ -12,9 +13,18 @@ TeaNotFoundError, TeaRequestError, TeaServerError, + TeaValidationError, ) +def _get_user_agent() -> str: + try: + v = _pkg_version("libtea") + except Exception: + v = "unknown" + return f"py-libtea/{v} (hello@sbomify.com)" + + class TeaHttpClient: """Low-level HTTP client for TEA API requests.""" @@ -25,7 +35,7 @@ def __init__( token: str | None = None, timeout: float = 30.0, ): - headers = {"user-agent": "py-libtea"} + headers = {"user-agent": _get_user_agent()} if token: headers["authorization"] = f"Bearer {token}" @@ -44,7 +54,10 
@@ def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: raise TeaConnectionError(str(exc)) from exc self._raise_for_status(response) - return response.json() + try: + return response.json() + except ValueError as exc: + raise TeaValidationError(f"Invalid JSON in response: {exc}") from exc def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | None = None) -> dict[str, str]: """Download a file and compute checksums on-the-fly. Returns {algorithm: hex_digest}. @@ -83,9 +96,10 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non elif hashlib_name: hashers[alg] = hashlib.new(hashlib_name) + dest.parent.mkdir(parents=True, exist_ok=True) try: with httpx.Client( - headers={"user-agent": "py-libtea"}, + headers={"user-agent": _get_user_agent()}, timeout=self._timeout, ) as download_client: with download_client.stream("GET", url) as response: diff --git a/tests/test_http.py b/tests/test_http.py index c79c628..0bbbde4 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -9,6 +9,7 @@ TeaNotFoundError, TeaRequestError, TeaServerError, + TeaValidationError, ) @@ -111,3 +112,27 @@ def test_download_cleans_up_partial_file_on_transport_error(self, http_client, t with pytest.raises(TeaConnectionError): http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest) assert not dest.exists() + + @respx.mock + def test_get_json_non_json_response_raises_validation_error(self, http_client, base_url): + respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(200, content=b"not json")) + with pytest.raises(TeaValidationError, match="Invalid JSON"): + http_client.get_json("/product/abc") + + @respx.mock + def test_download_creates_parent_directories(self, http_client, tmp_path): + content = b"nested file" + respx.get("https://artifacts.example.com/sbom.xml").mock(return_value=httpx.Response(200, content=content)) + dest = tmp_path / "a" / "b" / "sbom.xml" + 
http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest) + assert dest.read_bytes() == content + + @respx.mock + def test_user_agent_includes_version(self, base_url): + route = respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(200, json={"uuid": "abc"})) + client = TeaHttpClient(base_url=base_url) + client.get_json("/product/abc") + ua = route.calls[0].request.headers["user-agent"] + assert ua.startswith("py-libtea/") + assert "hello@sbomify.com" in ua + client.close() From 6292926213a829eb764bf3ddf59306d74ae4a05c Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 16:12:24 +0300 Subject: [PATCH 07/17] Refactor user-agent handling to retrieve package version dynamically - Updated `_get_user_agent` function to `_get_package_version`, which now retrieves the package version using `importlib.metadata` or falls back to reading `pyproject.toml`. - Adjusted `USER_AGENT` to use the new version retrieval method. - Ensured consistent user-agent usage in `TeaHttpClient` methods. 
--- libtea/_http.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/libtea/_http.py b/libtea/_http.py index 47b30c7..84ab207 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -1,7 +1,6 @@ """Internal HTTP client wrapping httpx with TEA error handling.""" import hashlib -from importlib.metadata import version as _pkg_version from pathlib import Path from typing import Any @@ -17,12 +16,27 @@ ) -def _get_user_agent() -> str: +def _get_package_version() -> str: + """Get the package version for User-Agent header.""" try: - v = _pkg_version("libtea") + from importlib.metadata import version + + return version("libtea") except Exception: - v = "unknown" - return f"py-libtea/{v} (hello@sbomify.com)" + try: + import tomllib + + pyproject_path = Path(__file__).parent.parent / "pyproject.toml" + if pyproject_path.exists(): + with open(pyproject_path, "rb") as f: + pyproject_data = tomllib.load(f) + return pyproject_data.get("project", {}).get("version", "unknown") + except Exception: + pass + return "unknown" + + +USER_AGENT = f"py-libtea/{_get_package_version()} (hello@sbomify.com)" class TeaHttpClient: @@ -35,7 +49,7 @@ def __init__( token: str | None = None, timeout: float = 30.0, ): - headers = {"user-agent": _get_user_agent()} + headers = {"user-agent": USER_AGENT} if token: headers["authorization"] = f"Bearer {token}" @@ -99,7 +113,7 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non dest.parent.mkdir(parents=True, exist_ok=True) try: with httpx.Client( - headers={"user-agent": _get_user_agent()}, + headers={"user-agent": USER_AGENT}, timeout=self._timeout, ) as download_client: with download_client.stream("GET", url) as response: From b95d3144d786a303288399f785aaf3e2b106eed7 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 16:24:18 +0300 Subject: [PATCH 08/17] Address PR review round 3: follow redirects, discovery error handling, README fixes - Enable 
follow_redirects=True on both API and download httpx clients - Wrap JSON/validation errors in fetch_well_known as TeaDiscoveryError - Match sbomify-action user-agent pattern with pyproject.toml fallback - Fix README code snippets to be self-contained (remove stale indentation) --- README.md | 42 ++++++++++++++++++++--------------------- libtea/_http.py | 2 ++ libtea/discovery.py | 10 +++++++++- tests/test_discovery.py | 12 ++++++++++++ 4 files changed, 44 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index 30b1c2b..e9f8826 100644 --- a/README.md +++ b/README.md @@ -42,23 +42,23 @@ with TeaClient.from_well_known("example.com") as client: ### Components ```python - component = client.get_component("component-uuid") - releases = client.get_component_releases("component-uuid") +component = client.get_component("component-uuid") +releases = client.get_component_releases("component-uuid") - # Get a component release with its latest collection - cr = client.get_component_release("release-uuid") - print(cr.release.version, len(cr.latest_collection.artifacts)) +# Get a component release with its latest collection +cr = client.get_component_release("release-uuid") +print(cr.release.version, len(cr.latest_collection.artifacts)) ``` ### Collections and artifacts ```python - collection = client.get_component_release_collection_latest("release-uuid") - for artifact in collection.artifacts: - print(artifact.name, artifact.type) +collection = client.get_component_release_collection_latest("release-uuid") +for artifact in collection.artifacts: + print(artifact.name, artifact.type) - # Specific collection version - collection_v3 = client.get_component_release_collection("release-uuid", 3) +# Specific collection version +collection_v3 = client.get_component_release_collection("release-uuid", 3) ``` ### Downloading artifacts with checksum verification @@ -66,23 +66,23 @@ with TeaClient.from_well_known("example.com") as client: ```python from pathlib import Path - 
artifact = client.get_artifact("artifact-uuid") - fmt = artifact.formats[0] +artifact = client.get_artifact("artifact-uuid") +fmt = artifact.formats[0] - # Downloads and verifies checksums on-the-fly - client.download_artifact( - fmt.url, - Path("sbom.json"), - verify_checksums=fmt.checksums, - ) +# Downloads and verifies checksums on-the-fly +client.download_artifact( + fmt.url, + Path("sbom.json"), + verify_checksums=fmt.checksums, +) ``` ### Discovery via TEI ```python - results = client.discover("urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b") - for info in results: - print(info.product_release_uuid, info.servers) +results = client.discover("urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b") +for info in results: + print(info.product_release_uuid, info.servers) ``` ## Error handling diff --git a/libtea/_http.py b/libtea/_http.py index 84ab207..c6233f5 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -58,6 +58,7 @@ def __init__( base_url=base_url, headers=headers, timeout=timeout, + follow_redirects=True, ) def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: @@ -115,6 +116,7 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non with httpx.Client( headers={"user-agent": USER_AGENT}, timeout=self._timeout, + follow_redirects=True, ) as download_client: with download_client.stream("GET", url) as response: self._raise_for_status(response) diff --git a/libtea/discovery.py b/libtea/discovery.py index 702b15b..11e96fa 100644 --- a/libtea/discovery.py +++ b/libtea/discovery.py @@ -33,7 +33,15 @@ def fetch_well_known(domain: str, *, timeout: float = 10.0) -> TeaWellKnown: except httpx.TransportError as exc: raise TeaDiscoveryError(f"Failed to connect to {url}: {exc}") from exc - return TeaWellKnown.model_validate(response.json()) + try: + data = response.json() + except ValueError as exc: + raise TeaDiscoveryError(f"Invalid JSON in .well-known/tea response from {domain}") from exc + 
+ try: + return TeaWellKnown.model_validate(data) + except Exception as exc: + raise TeaDiscoveryError(f"Invalid .well-known/tea document from {domain}: {exc}") from exc def select_endpoint(well_known: TeaWellKnown, supported_version: str) -> TeaEndpoint: diff --git a/tests/test_discovery.py b/tests/test_discovery.py index 5b8823d..f3d52ed 100644 --- a/tests/test_discovery.py +++ b/tests/test_discovery.py @@ -87,6 +87,18 @@ def test_fetch_well_known_500_raises_discovery_error(self): with pytest.raises(TeaDiscoveryError): fetch_well_known("example.com") + @respx.mock + def test_fetch_well_known_non_json_raises_discovery_error(self): + respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(200, content=b"not json")) + with pytest.raises(TeaDiscoveryError, match="Invalid JSON"): + fetch_well_known("example.com") + + @respx.mock + def test_fetch_well_known_invalid_schema_raises_discovery_error(self): + respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(200, json={"bad": "data"})) + with pytest.raises(TeaDiscoveryError, match="Invalid .well-known/tea"): + fetch_well_known("example.com") + class TestSelectEndpoint: def _make_well_known(self, endpoints: list[dict]) -> TeaWellKnown: From ab31da5649d2b3ab704e545f0e6744b7a365ff20 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 16:53:10 +0300 Subject: [PATCH 09/17] Refactor HTTP client to use requests library and update dependencies - Replaced httpx with requests in the TeaHttpClient for improved error handling and session management. - Updated dependencies in pyproject.toml to use requests instead of httpx, and modified related README documentation. - Adjusted unit tests to utilize responses for mocking HTTP requests instead of respx. - Cleaned up unused imports and ensured consistent error handling across the codebase. 
--- README.md | 2 +- libtea/_http.py | 55 ++++--- libtea/client.py | 2 +- libtea/discovery.py | 15 +- pyproject.toml | 4 +- tests/test_client.py | 319 ++++++++++++++++++---------------------- tests/test_discovery.py | 40 +++-- tests/test_download.py | 32 ++-- tests/test_http.py | 84 +++++------ uv.lock | 167 +++++++++++++-------- 10 files changed, 364 insertions(+), 356 deletions(-) diff --git a/README.md b/README.md index e9f8826..1314fda 100644 --- a/README.md +++ b/README.md @@ -106,7 +106,7 @@ from libtea.exceptions import ( ## Requirements - Python >= 3.11 -- [httpx](https://www.python-httpx.org/) for HTTP +- [requests](https://requests.readthedocs.io/) for HTTP - [Pydantic](https://docs.pydantic.dev/) v2 for data models ## Development diff --git a/libtea/_http.py b/libtea/_http.py index c6233f5..526d676 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -1,10 +1,10 @@ -"""Internal HTTP client wrapping httpx with TEA error handling.""" +"""Internal HTTP client wrapping requests with TEA error handling.""" import hashlib from pathlib import Path from typing import Any -import httpx +import requests from libtea.exceptions import ( TeaAuthenticationError, @@ -49,23 +49,21 @@ def __init__( token: str | None = None, timeout: float = 30.0, ): - headers = {"user-agent": USER_AGENT} - if token: - headers["authorization"] = f"Bearer {token}" - + self._base_url = base_url.rstrip("/") self._timeout = timeout - self._client = httpx.Client( - base_url=base_url, - headers=headers, - timeout=timeout, - follow_redirects=True, - ) + self._session = requests.Session() + self._session.headers["user-agent"] = USER_AGENT + if token: + self._session.headers["authorization"] = f"Bearer {token}" def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: """Send GET request and return parsed JSON.""" + url = f"{self._base_url}{path}" try: - response = self._client.get(path, params=params) - except httpx.TransportError as exc: + response = self._session.get(url, 
params=params, timeout=self._timeout) + except requests.ConnectionError as exc: + raise TeaConnectionError(str(exc)) from exc + except requests.Timeout as exc: raise TeaConnectionError(str(exc)) from exc self._raise_for_status(response) @@ -77,7 +75,7 @@ def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | None = None) -> dict[str, str]: """Download a file and compute checksums on-the-fly. Returns {algorithm: hex_digest}. - Uses a separate unauthenticated httpx client so that the bearer token + Uses a separate unauthenticated session so that the bearer token is not leaked to third-party artifact hosts (CDNs, Maven Central, etc.). """ from libtea.exceptions import TeaChecksumError @@ -113,19 +111,16 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non dest.parent.mkdir(parents=True, exist_ok=True) try: - with httpx.Client( - headers={"user-agent": USER_AGENT}, - timeout=self._timeout, - follow_redirects=True, - ) as download_client: - with download_client.stream("GET", url) as response: - self._raise_for_status(response) - with open(dest, "wb") as f: - for chunk in response.iter_bytes(chunk_size=8192): - f.write(chunk) - for h in hashers.values(): - h.update(chunk) - except httpx.TransportError as exc: + with requests.Session() as download_session: + download_session.headers["user-agent"] = USER_AGENT + response = download_session.get(url, stream=True, timeout=self._timeout) + self._raise_for_status(response) + with open(dest, "wb") as f: + for chunk in response.iter_content(chunk_size=8192): + f.write(chunk) + for h in hashers.values(): + h.update(chunk) + except (requests.ConnectionError, requests.Timeout) as exc: dest.unlink(missing_ok=True) raise TeaConnectionError(str(exc)) from exc except Exception: @@ -135,7 +130,7 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non return {alg: h.hexdigest() for 
alg, h in hashers.items()} def close(self) -> None: - self._client.close() + self._session.close() def __enter__(self): return self @@ -144,7 +139,7 @@ def __exit__(self, *args): self.close() @staticmethod - def _raise_for_status(response: httpx.Response) -> None: + def _raise_for_status(response: requests.Response) -> None: """Map HTTP status codes to typed exceptions.""" status = response.status_code if 200 <= status < 300: diff --git a/libtea/client.py b/libtea/client.py index 6b4a87e..583d43e 100644 --- a/libtea/client.py +++ b/libtea/client.py @@ -71,7 +71,7 @@ def from_well_known( # --- Discovery --- def discover(self, tei: str) -> list[DiscoveryInfo]: - # httpx auto-encodes query params — do NOT pre-encode with quote() + # requests auto-encodes query params — do NOT pre-encode with quote() data = self._http.get_json("/discovery", params={"tei": tei}) return _validate_list(DiscoveryInfo, data) diff --git a/libtea/discovery.py b/libtea/discovery.py index 11e96fa..483f186 100644 --- a/libtea/discovery.py +++ b/libtea/discovery.py @@ -1,6 +1,6 @@ """TEI parsing, .well-known/tea fetching, and endpoint selection.""" -import httpx +import requests from libtea.exceptions import TeaDiscoveryError from libtea.models import TeaEndpoint, TeaWellKnown @@ -26,12 +26,15 @@ def fetch_well_known(domain: str, *, timeout: float = 10.0) -> TeaWellKnown: """Fetch and parse the .well-known/tea document from a domain via HTTPS.""" url = f"https://{domain}/.well-known/tea" try: - response = httpx.get(url, timeout=timeout, follow_redirects=True) - response.raise_for_status() - except httpx.HTTPStatusError as exc: - raise TeaDiscoveryError(f"Failed to fetch {url}: HTTP {exc.response.status_code}") from exc - except httpx.TransportError as exc: + response = requests.get(url, timeout=timeout, allow_redirects=True) + if response.status_code >= 400: + raise TeaDiscoveryError(f"Failed to fetch {url}: HTTP {response.status_code}") + except requests.ConnectionError as exc: raise 
TeaDiscoveryError(f"Failed to connect to {url}: {exc}") from exc + except requests.Timeout as exc: + raise TeaDiscoveryError(f"Failed to connect to {url}: {exc}") from exc + except TeaDiscoveryError: + raise try: data = response.json() diff --git a/pyproject.toml b/pyproject.toml index 086c261..55e9256 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ classifiers = [ "Topic :: Software Development :: Libraries :: Python Modules", ] dependencies = [ - "httpx>=0.27.0,<1", + "requests>=2.31.0,<3", "pydantic>=2.1.0,<3", ] @@ -37,7 +37,7 @@ dev = [ "pytest-cov>=4.1.0,<5", "ruff>=0.12.0,<0.13", "pre-commit>=4.2.0,<5", - "respx>=0.22.0,<1", + "responses>=0.25.0,<1", ] [tool.hatch.build.targets.wheel] diff --git a/tests/test_client.py b/tests/test_client.py index ec52666..5407d56 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,6 +1,5 @@ -import httpx import pytest -import respx +import responses from libtea.client import TeaClient from libtea.models import ( @@ -17,42 +16,38 @@ class TestProduct: - @respx.mock + @responses.activate def test_get_product(self, client): - respx.get(f"{BASE}/product/abc-123").mock( - return_value=httpx.Response( - 200, - json={ - "uuid": "abc-123", - "name": "Test Product", - "identifiers": [{"idType": "PURL", "idValue": "pkg:npm/test"}], - }, - ) + responses.get( + f"{BASE}/product/abc-123", + json={ + "uuid": "abc-123", + "name": "Test Product", + "identifiers": [{"idType": "PURL", "idValue": "pkg:npm/test"}], + }, ) product = client.get_product("abc-123") assert isinstance(product, Product) assert product.name == "Test Product" - @respx.mock + @responses.activate def test_get_product_releases(self, client): - respx.get(f"{BASE}/product/abc-123/releases").mock( - return_value=httpx.Response( - 200, - json={ - "timestamp": "2024-03-20T15:30:00Z", - "pageStartIndex": 0, - "pageSize": 100, - "totalResults": 1, - "results": [ - { - "uuid": "rel-1", - "version": "1.0.0", - "createdDate": "2024-01-01T00:00:00Z", 
- "components": [{"uuid": "comp-1"}], - } - ], - }, - ) + responses.get( + f"{BASE}/product/abc-123/releases", + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 0, + "pageSize": 100, + "totalResults": 1, + "results": [ + { + "uuid": "rel-1", + "version": "1.0.0", + "createdDate": "2024-01-01T00:00:00Z", + "components": [{"uuid": "comp-1"}], + } + ], + }, ) resp = client.get_product_releases("abc-123") assert isinstance(resp, PaginatedProductReleaseResponse) @@ -60,65 +55,57 @@ def test_get_product_releases(self, client): class TestProductRelease: - @respx.mock + @responses.activate def test_get_product_release(self, client): - respx.get(f"{BASE}/productRelease/rel-1").mock( - return_value=httpx.Response( - 200, - json={ - "uuid": "rel-1", - "version": "1.0.0", - "createdDate": "2024-01-01T00:00:00Z", - "components": [{"uuid": "comp-1"}], - }, - ) + responses.get( + f"{BASE}/productRelease/rel-1", + json={ + "uuid": "rel-1", + "version": "1.0.0", + "createdDate": "2024-01-01T00:00:00Z", + "components": [{"uuid": "comp-1"}], + }, ) release = client.get_product_release("rel-1") assert isinstance(release, ProductRelease) assert release.version == "1.0.0" - @respx.mock + @responses.activate def test_get_product_release_collection_latest(self, client): - respx.get(f"{BASE}/productRelease/rel-1/collection/latest").mock( - return_value=httpx.Response( - 200, - json={ - "uuid": "rel-1", - "version": 1, - "artifacts": [], - }, - ) + responses.get( + f"{BASE}/productRelease/rel-1/collection/latest", + json={ + "uuid": "rel-1", + "version": 1, + "artifacts": [], + }, ) collection = client.get_product_release_collection_latest("rel-1") assert isinstance(collection, Collection) class TestComponent: - @respx.mock + @responses.activate def test_get_component(self, client): - respx.get(f"{BASE}/component/comp-1").mock( - return_value=httpx.Response( - 200, - json={ - "uuid": "comp-1", - "name": "Test Component", - "identifiers": [], - }, - ) + responses.get( + 
f"{BASE}/component/comp-1", + json={ + "uuid": "comp-1", + "name": "Test Component", + "identifiers": [], + }, ) component = client.get_component("comp-1") assert isinstance(component, Component) assert component.name == "Test Component" - @respx.mock + @responses.activate def test_get_component_releases(self, client): - respx.get(f"{BASE}/component/comp-1/releases").mock( - return_value=httpx.Response( - 200, - json=[ - {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"}, - ], - ) + responses.get( + f"{BASE}/component/comp-1/releases", + json=[ + {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"}, + ], ) releases = client.get_component_releases("comp-1") assert len(releases) == 1 @@ -126,72 +113,68 @@ def test_get_component_releases(self, client): class TestComponentRelease: - @respx.mock + @responses.activate def test_get_component_release(self, client): - respx.get(f"{BASE}/componentRelease/cr-1").mock( - return_value=httpx.Response( - 200, - json={ - "release": {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"}, - "latestCollection": {"uuid": "cr-1", "version": 1, "artifacts": []}, - }, - ) + responses.get( + f"{BASE}/componentRelease/cr-1", + json={ + "release": {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"}, + "latestCollection": {"uuid": "cr-1", "version": 1, "artifacts": []}, + }, ) result = client.get_component_release("cr-1") assert isinstance(result, ComponentReleaseWithCollection) assert result.release.version == "1.0.0" - @respx.mock + @responses.activate def test_get_component_release_collection_latest(self, client): - respx.get(f"{BASE}/componentRelease/cr-1/collection/latest").mock( - return_value=httpx.Response(200, json={"uuid": "cr-1", "version": 2, "artifacts": []}) + responses.get( + f"{BASE}/componentRelease/cr-1/collection/latest", + json={"uuid": "cr-1", "version": 2, "artifacts": []}, ) collection = 
client.get_component_release_collection_latest("cr-1") assert isinstance(collection, Collection) assert collection.version == 2 - @respx.mock + @responses.activate def test_get_component_release_collections(self, client): - respx.get(f"{BASE}/componentRelease/cr-1/collections").mock( - return_value=httpx.Response( - 200, - json=[ - {"uuid": "cr-1", "version": 1, "artifacts": []}, - {"uuid": "cr-1", "version": 2, "artifacts": []}, - ], - ) + responses.get( + f"{BASE}/componentRelease/cr-1/collections", + json=[ + {"uuid": "cr-1", "version": 1, "artifacts": []}, + {"uuid": "cr-1", "version": 2, "artifacts": []}, + ], ) collections = client.get_component_release_collections("cr-1") assert len(collections) == 2 - @respx.mock + @responses.activate def test_get_component_release_collection_by_version(self, client): - respx.get(f"{BASE}/componentRelease/cr-1/collection/3").mock( - return_value=httpx.Response(200, json={"uuid": "cr-1", "version": 3, "artifacts": []}) + responses.get( + f"{BASE}/componentRelease/cr-1/collection/3", + json={"uuid": "cr-1", "version": 3, "artifacts": []}, ) collection = client.get_component_release_collection("cr-1", 3) assert collection.version == 3 class TestArtifact: - @respx.mock + @responses.activate def test_get_artifact(self, client): - respx.get(f"{BASE}/artifact/art-1").mock( - return_value=httpx.Response( - 200, - json={ - "uuid": "art-1", - "name": "SBOM", - "type": "BOM", - "formats": [ - { - "mediaType": "application/json", - "url": "https://example.com/sbom.json", - "checksums": [], - } - ], - }, - ) + responses.get( + f"{BASE}/artifact/art-1", + json={ + "uuid": "art-1", + "name": "SBOM", + "type": "BOM", + "formats": [ + { + "mediaType": "application/json", + "url": "https://example.com/sbom.json", + "checksums": [], + } + ], + }, ) artifact = client.get_artifact("art-1") assert isinstance(artifact, Artifact) @@ -199,113 +182,105 @@ def test_get_artifact(self, client): class TestDiscovery: - @respx.mock + @responses.activate 
def test_discover(self, client): tei = "urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b" - route = respx.get(f"{BASE}/discovery").mock( - return_value=httpx.Response( - 200, - json=[ - { - "productReleaseUuid": "d4d9f54a-abcf-11ee-ac79-1a52914d44b", - "servers": [{"rootUrl": "https://api.example.com", "versions": ["1.0.0"]}], - } - ], - ) + responses.get( + f"{BASE}/discovery", + json=[ + { + "productReleaseUuid": "d4d9f54a-abcf-11ee-ac79-1a52914d44b", + "servers": [{"rootUrl": "https://api.example.com", "versions": ["1.0.0"]}], + } + ], ) results = client.discover(tei) assert len(results) == 1 assert results[0].product_release_uuid == "d4d9f54a-abcf-11ee-ac79-1a52914d44b" - # Verify TEI is NOT double-encoded (httpx auto-encodes params) - request = route.calls[0].request + # Verify TEI is NOT double-encoded (requests auto-encodes params) + request = responses.calls[0].request assert "tei=" in str(request.url) - @respx.mock + @responses.activate def test_discover_empty_result(self, client): - respx.get(f"{BASE}/discovery").mock(return_value=httpx.Response(200, json=[])) + responses.get(f"{BASE}/discovery", json=[]) results = client.discover("urn:tei:uuid:example.com:d4d9f54a") assert results == [] class TestFromWellKnown: - @respx.mock + @responses.activate def test_from_well_known_creates_client(self): - respx.get("https://example.com/.well-known/tea").mock( - return_value=httpx.Response( - 200, - json={ - "schemaVersion": 1, - "endpoints": [{"url": "https://api.example.com", "versions": ["0.3.0-beta.2"]}], - }, - ) + responses.get( + "https://example.com/.well-known/tea", + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["0.3.0-beta.2"]}], + }, ) client = TeaClient.from_well_known("example.com") assert client is not None client.close() - @respx.mock + @responses.activate def test_from_well_known_no_compatible_version_raises(self): from libtea.exceptions import TeaDiscoveryError - 
respx.get("https://example.com/.well-known/tea").mock( - return_value=httpx.Response( - 200, - json={ - "schemaVersion": 1, - "endpoints": [{"url": "https://api.example.com", "versions": ["99.0.0"]}], - }, - ) + responses.get( + "https://example.com/.well-known/tea", + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["99.0.0"]}], + }, ) with pytest.raises(TeaDiscoveryError): TeaClient.from_well_known("example.com") - @respx.mock + @responses.activate def test_from_well_known_passes_token(self): - respx.get("https://example.com/.well-known/tea").mock( - return_value=httpx.Response( - 200, - json={ - "schemaVersion": 1, - "endpoints": [{"url": "https://api.example.com", "versions": ["0.3.0-beta.2"]}], - }, - ) + responses.get( + "https://example.com/.well-known/tea", + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["0.3.0-beta.2"]}], + }, ) - route = respx.get("https://api.example.com/v0.3.0-beta.2/product/abc").mock( - return_value=httpx.Response(200, json={"uuid": "abc", "name": "P", "identifiers": []}) + responses.get( + "https://api.example.com/v0.3.0-beta.2/product/abc", + json={"uuid": "abc", "name": "P", "identifiers": []}, ) client = TeaClient.from_well_known("example.com", token="secret") client.get_product("abc") - assert route.calls[0].request.headers["authorization"] == "Bearer secret" + assert responses.calls[1].request.headers["authorization"] == "Bearer secret" client.close() class TestPagination: - @respx.mock + @responses.activate def test_get_product_releases_pagination_params(self, client): - route = respx.get(f"{BASE}/product/abc-123/releases").mock( - return_value=httpx.Response( - 200, - json={ - "timestamp": "2024-03-20T15:30:00Z", - "pageStartIndex": 50, - "pageSize": 25, - "totalResults": 200, - "results": [], - }, - ) + responses.get( + f"{BASE}/product/abc-123/releases", + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 50, + 
"pageSize": 25, + "totalResults": 200, + "results": [], + }, ) resp = client.get_product_releases("abc-123", page_offset=50, page_size=25) - request = route.calls[0].request + request = responses.calls[0].request assert "pageOffset=50" in str(request.url) assert "pageSize=25" in str(request.url) assert resp.page_start_index == 50 class TestContextManager: - @respx.mock + @responses.activate def test_client_as_context_manager(self): - respx.get(f"{BASE}/component/c1").mock( - return_value=httpx.Response(200, json={"uuid": "c1", "name": "C1", "identifiers": []}) + responses.get( + f"{BASE}/component/c1", + json={"uuid": "c1", "name": "C1", "identifiers": []}, ) with TeaClient(base_url=BASE) as client: component = client.get_component("c1") diff --git a/tests/test_discovery.py b/tests/test_discovery.py index f3d52ed..28f7838 100644 --- a/tests/test_discovery.py +++ b/tests/test_discovery.py @@ -1,6 +1,6 @@ -import httpx import pytest -import respx +import requests +import responses from libtea.discovery import fetch_well_known, parse_tei, select_endpoint from libtea.exceptions import TeaDiscoveryError @@ -54,48 +54,46 @@ def test_tei_with_slash_in_purl_identifier(self): class TestFetchWellKnown: - @respx.mock + @responses.activate def test_fetch_well_known_success(self): - respx.get("https://example.com/.well-known/tea").mock( - return_value=httpx.Response( - 200, - json={ - "schemaVersion": 1, - "endpoints": [{"url": "https://api.example.com", "versions": ["1.0.0"]}], - }, - ) + responses.get( + "https://example.com/.well-known/tea", + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["1.0.0"]}], + }, ) wk = fetch_well_known("example.com") assert wk.schema_version == 1 assert len(wk.endpoints) == 1 - @respx.mock + @responses.activate def test_fetch_well_known_404_raises_discovery_error(self): - respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(404)) + 
responses.get("https://example.com/.well-known/tea", status=404) with pytest.raises(TeaDiscoveryError, match="HTTP 404"): fetch_well_known("example.com") - @respx.mock + @responses.activate def test_fetch_well_known_connection_error(self): - respx.get("https://example.com/.well-known/tea").mock(side_effect=httpx.ConnectError("refused")) + responses.get("https://example.com/.well-known/tea", body=requests.ConnectionError("refused")) with pytest.raises(TeaDiscoveryError, match="Failed to connect"): fetch_well_known("example.com") - @respx.mock + @responses.activate def test_fetch_well_known_500_raises_discovery_error(self): - respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(500)) + responses.get("https://example.com/.well-known/tea", status=500) with pytest.raises(TeaDiscoveryError): fetch_well_known("example.com") - @respx.mock + @responses.activate def test_fetch_well_known_non_json_raises_discovery_error(self): - respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(200, content=b"not json")) + responses.get("https://example.com/.well-known/tea", body="not json") with pytest.raises(TeaDiscoveryError, match="Invalid JSON"): fetch_well_known("example.com") - @respx.mock + @responses.activate def test_fetch_well_known_invalid_schema_raises_discovery_error(self): - respx.get("https://example.com/.well-known/tea").mock(return_value=httpx.Response(200, json={"bad": "data"})) + responses.get("https://example.com/.well-known/tea", json={"bad": "data"}) with pytest.raises(TeaDiscoveryError, match="Invalid .well-known/tea"): fetch_well_known("example.com") diff --git a/tests/test_download.py b/tests/test_download.py index 5538547..4d08d9a 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -1,8 +1,7 @@ import hashlib -import httpx import pytest -import respx +import responses from libtea.exceptions import TeaChecksumError from libtea.models import Checksum, ChecksumAlgorithm @@ -12,17 +11,17 @@ 
class TestDownloadArtifact: - @respx.mock + @responses.activate def test_download_without_checksum(self, client, tmp_path): - respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT) dest = tmp_path / "sbom.json" result = client.download_artifact(ARTIFACT_URL, dest) assert result == dest assert dest.read_bytes() == ARTIFACT_CONTENT - @respx.mock + @responses.activate def test_download_with_valid_checksum(self, client, tmp_path): - respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT) sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest() checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)] dest = tmp_path / "sbom.json" @@ -30,18 +29,18 @@ def test_download_with_valid_checksum(self, client, tmp_path): assert result == dest assert dest.exists() - @respx.mock + @responses.activate def test_download_with_invalid_checksum_deletes_file(self, client, tmp_path): - respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT) checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="badhash")] dest = tmp_path / "sbom.json" with pytest.raises(TeaChecksumError, match="SHA-256"): client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) assert not dest.exists() - @respx.mock + @responses.activate def test_download_with_multiple_checksums(self, client, tmp_path): - respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT) sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest() sha1 = hashlib.sha1(ARTIFACT_CONTENT).hexdigest() checksums = [ @@ -52,9 +51,9 @@ def test_download_with_multiple_checksums(self, client, tmp_path): result = client.download_artifact(ARTIFACT_URL, dest, 
verify_checksums=checksums) assert result == dest - @respx.mock + @responses.activate def test_download_checksum_uppercase_hex_accepted(self, client, tmp_path): - respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT) sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest().upper() checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)] dest = tmp_path / "sbom.json" @@ -67,17 +66,12 @@ def test_download_with_blake3_raises_clear_error(self, client, tmp_path): with pytest.raises(TeaChecksumError, match="BLAKE3"): client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums) - @respx.mock + @responses.activate def test_download_with_unknown_algorithm_raises_clear_error(self, client, tmp_path): """If an algorithm has no hashlib mapping, verification should raise explicitly.""" - respx.get(ARTIFACT_URL).mock(return_value=httpx.Response(200, content=ARTIFACT_CONTENT)) + responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT) checksums = [Checksum(alg_type=ChecksumAlgorithm.BLAKE3, alg_value="abc123")] dest = tmp_path / "sbom.json" - # BLAKE3 raises before download, so test with a checksum whose algorithm - # was silently skipped during hashing (simulated by providing BLAKE3 in - # verify_checksums but not in the download algorithms list). - # Instead, we test the path by calling download_artifact with a checksum - # that has an algorithm not in the computed dict. 
from unittest.mock import patch # Patch download_with_hashes to return empty dict (no algorithms computed) diff --git a/tests/test_http.py b/tests/test_http.py index 0bbbde4..35f8e74 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -1,6 +1,6 @@ -import httpx import pytest -import respx +import requests +import responses from libtea._http import TeaHttpClient from libtea.exceptions import ( @@ -14,125 +14,119 @@ class TestTeaHttpClient: - @respx.mock + @responses.activate def test_get_json_success(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock( - return_value=httpx.Response(200, json={"uuid": "abc", "name": "Test"}) - ) + responses.get(f"{base_url}/product/abc", json={"uuid": "abc", "name": "Test"}) data = http_client.get_json("/product/abc") assert data == {"uuid": "abc", "name": "Test"} - @respx.mock + @responses.activate def test_get_json_with_bearer_token(self, base_url): - route = respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(200, json={"uuid": "abc"})) + responses.get(f"{base_url}/product/abc", json={"uuid": "abc"}) client = TeaHttpClient(base_url=base_url, token="my-token") client.get_json("/product/abc") - assert route.calls[0].request.headers["authorization"] == "Bearer my-token" + assert responses.calls[0].request.headers["authorization"] == "Bearer my-token" client.close() - @respx.mock + @responses.activate def test_404_raises_not_found_with_error_type(self, http_client, base_url): - respx.get(f"{base_url}/product/missing").mock( - return_value=httpx.Response(404, json={"error": "OBJECT_UNKNOWN"}) - ) + responses.get(f"{base_url}/product/missing", json={"error": "OBJECT_UNKNOWN"}, status=404) with pytest.raises(TeaNotFoundError) as exc_info: http_client.get_json("/product/missing") assert exc_info.value.error_type == "OBJECT_UNKNOWN" - @respx.mock + @responses.activate def test_404_with_object_not_shareable(self, http_client, base_url): - respx.get(f"{base_url}/product/restricted").mock( - 
return_value=httpx.Response(404, json={"error": "OBJECT_NOT_SHAREABLE"}) - ) + responses.get(f"{base_url}/product/restricted", json={"error": "OBJECT_NOT_SHAREABLE"}, status=404) with pytest.raises(TeaNotFoundError) as exc_info: http_client.get_json("/product/restricted") assert exc_info.value.error_type == "OBJECT_NOT_SHAREABLE" - @respx.mock + @responses.activate def test_404_with_non_json_body(self, http_client, base_url): - respx.get(f"{base_url}/product/missing").mock(return_value=httpx.Response(404, content=b"Not Found")) + responses.get(f"{base_url}/product/missing", body="Not Found", status=404) with pytest.raises(TeaNotFoundError) as exc_info: http_client.get_json("/product/missing") assert exc_info.value.error_type is None - @respx.mock + @responses.activate def test_401_raises_auth_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(401)) + responses.get(f"{base_url}/product/abc", status=401) with pytest.raises(TeaAuthenticationError): http_client.get_json("/product/abc") - @respx.mock + @responses.activate def test_403_raises_auth_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(403)) + responses.get(f"{base_url}/product/abc", status=403) with pytest.raises(TeaAuthenticationError): http_client.get_json("/product/abc") - @respx.mock + @responses.activate def test_400_raises_request_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(400)) + responses.get(f"{base_url}/product/abc", status=400) with pytest.raises(TeaRequestError): http_client.get_json("/product/abc") - @respx.mock + @responses.activate def test_500_raises_server_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(500)) + responses.get(f"{base_url}/product/abc", status=500) with pytest.raises(TeaServerError): http_client.get_json("/product/abc") - @respx.mock + 
@responses.activate def test_502_raises_server_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(502)) + responses.get(f"{base_url}/product/abc", status=502) with pytest.raises(TeaServerError): http_client.get_json("/product/abc") - @respx.mock + @responses.activate def test_connection_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(side_effect=httpx.ConnectError("refused")) + responses.get(f"{base_url}/product/abc", body=requests.ConnectionError("refused")) with pytest.raises(TeaConnectionError): http_client.get_json("/product/abc") - @respx.mock + @responses.activate def test_timeout_raises_connection_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(side_effect=httpx.TimeoutException("timed out")) + responses.get(f"{base_url}/product/abc", body=requests.Timeout("timed out")) with pytest.raises(TeaConnectionError, match="timed out"): http_client.get_json("/product/abc") - @respx.mock - def test_stream_to_file(self, http_client, base_url, tmp_path): + @responses.activate + def test_stream_to_file(self, http_client, tmp_path): content = b"file content here" - respx.get("https://artifacts.example.com/sbom.xml").mock(return_value=httpx.Response(200, content=content)) + responses.get("https://artifacts.example.com/sbom.xml", body=content) dest = tmp_path / "sbom.xml" http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest) assert dest.read_bytes() == content - @respx.mock + @responses.activate def test_download_cleans_up_partial_file_on_transport_error(self, http_client, tmp_path): - respx.get("https://artifacts.example.com/sbom.xml").mock(side_effect=httpx.ConnectError("refused")) + responses.get("https://artifacts.example.com/sbom.xml", body=requests.ConnectionError("refused")) dest = tmp_path / "sbom.xml" with pytest.raises(TeaConnectionError): 
http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest) assert not dest.exists() - @respx.mock + @responses.activate def test_get_json_non_json_response_raises_validation_error(self, http_client, base_url): - respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(200, content=b"not json")) + responses.get(f"{base_url}/product/abc", body="not json", status=200) with pytest.raises(TeaValidationError, match="Invalid JSON"): http_client.get_json("/product/abc") - @respx.mock + @responses.activate def test_download_creates_parent_directories(self, http_client, tmp_path): content = b"nested file" - respx.get("https://artifacts.example.com/sbom.xml").mock(return_value=httpx.Response(200, content=content)) + responses.get("https://artifacts.example.com/sbom.xml", body=content) dest = tmp_path / "a" / "b" / "sbom.xml" http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest) assert dest.read_bytes() == content - @respx.mock + @responses.activate def test_user_agent_includes_version(self, base_url): - route = respx.get(f"{base_url}/product/abc").mock(return_value=httpx.Response(200, json={"uuid": "abc"})) + responses.get(f"{base_url}/product/abc", json={"uuid": "abc"}) client = TeaHttpClient(base_url=base_url) client.get_json("/product/abc") - ua = route.calls[0].request.headers["user-agent"] + ua = responses.calls[0].request.headers["user-agent"] assert ua.startswith("py-libtea/") assert "hello@sbomify.com" in ua client.close() diff --git a/uv.lock b/uv.lock index 35a2d6f..84a3a7c 100644 --- a/uv.lock +++ b/uv.lock @@ -11,19 +11,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] -[[package]] -name = "anyio" -version = "4.12.1" -source = 
{ registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, -] - [[package]] name = "certifi" version = "2026.2.25" @@ -42,6 +29,79 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, ] +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", 
size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + [[package]] name = "colorama" version = "0.4.6" @@ -173,43 +233,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" }, ] -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = 
"sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - [[package]] name = "identify" version = "2.6.16" @@ -242,8 +265,8 @@ name = "libtea" version = "0.1.0" source = { editable = "." 
} dependencies = [ - { name = "httpx" }, { name = "pydantic" }, + { name = "requests" }, ] [package.dev-dependencies] @@ -251,14 +274,14 @@ dev = [ { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-cov" }, - { name = "respx" }, + { name = "responses" }, { name = "ruff" }, ] [package.metadata] requires-dist = [ - { name = "httpx", specifier = ">=0.27.0,<1" }, { name = "pydantic", specifier = ">=2.1.0,<3" }, + { name = "requests", specifier = ">=2.31.0,<3" }, ] [package.metadata.requires-dev] @@ -266,7 +289,7 @@ dev = [ { name = "pre-commit", specifier = ">=4.2.0,<5" }, { name = "pytest", specifier = ">=8.0.0,<9" }, { name = "pytest-cov", specifier = ">=4.1.0,<5" }, - { name = "respx", specifier = ">=0.22.0,<1" }, + { name = "responses", specifier = ">=0.25.0,<1" }, { name = "ruff", specifier = ">=0.12.0,<0.13" }, ] @@ -528,15 +551,32 @@ wheels = [ ] [[package]] -name = "respx" -version = "0.22.0" +name = "requests" +version = "2.32.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "httpx" }, + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, 
upload-time = "2024-12-19T22:33:57.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "responses" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/b4/b7e040379838cc71bf5aabdb26998dfbe5ee73904c92c1c161faf5de8866/responses-0.26.0.tar.gz", hash = "sha256:c7f6923e6343ef3682816ba421c006626777893cb0d5e1434f674b649bac9eb4", size = 81303, upload-time = "2026-02-19T14:38:05.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/04/7f73d05b556da048923e31a0cc878f03be7c5425ed1f268082255c75d872/responses-0.26.0-py3-none-any.whl", hash = "sha256:03ec4409088cd5c66b71ecbbbd27fe2c58ddfad801c66203457b3e6a04868c37", size = 35099, upload-time = "2026-02-19T14:38:03.847Z" }, ] [[package]] @@ -640,6 +680,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + [[package]] name = "virtualenv" version = "20.39.0" From 10438c66aa57ae5de53876d7859fa18801dd2497 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 17:00:49 +0300 Subject: [PATCH 10/17] Add integration tests for TEA API client - Introduced a new test file `test_integration.py` containing integration tests for the TEA API client. - Implemented a full consumer flow test that verifies the retrieval of product, component releases, collections, and artifacts using example data from the TEA OpenAPI spec. - Utilized the `responses` library to mock HTTP requests and validate the expected behavior of the `TeaClient` methods. --- tests/test_integration.py | 125 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 125 insertions(+) create mode 100644 tests/test_integration.py diff --git a/tests/test_integration.py b/tests/test_integration.py new file mode 100644 index 0000000..27c3d89 --- /dev/null +++ b/tests/test_integration.py @@ -0,0 +1,125 @@ +"""Integration tests using example data from the TEA OpenAPI spec.""" + +import responses + +from libtea.client import TeaClient +from libtea.models import ArtifactType, ChecksumAlgorithm, IdentifierType +from tests.conftest import BASE_URL as BASE + +# Example JSON taken directly from the TEA OpenAPI spec +LOG4J_PRODUCT = { + "uuid": "09e8c73b-ac45-4475-acac-33e6a7314e6d", + "name": "Apache Log4j 2", + "identifiers": [ + {"idType": "CPE", "idValue": "cpe:2.3:a:apache:log4j"}, + {"idType": "PURL", "idValue": "pkg:maven/org.apache.logging.log4j/log4j-api"}, + ], +} + +TOMCAT_RELEASE = { + "uuid": "605d0ecb-1057-40e4-9abf-c400b10f0345", + "version": "11.0.7", + "createdDate": "2025-05-07T18:08:00Z", + "releaseDate": 
"2025-05-12T18:08:00Z", + "identifiers": [{"idType": "PURL", "idValue": "pkg:maven/org.apache.tomcat/tomcat@11.0.7"}], + "distributions": [ + { + "distributionType": "zip", + "description": "Core binary distribution, zip archive", + "identifiers": [{"idType": "PURL", "idValue": "pkg:maven/org.apache.tomcat/tomcat@11.0.6?type=zip"}], + "checksums": [ + {"algType": "SHA-256", "algValue": "9da736a1cdd27231e70187cbc67398d29ca0b714f885e7032da9f1fb247693c1"} + ], + "url": "https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat/11.0.7/tomcat-11.0.6.zip", + "signatureUrl": "https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat/11.0.7/tomcat-11.0.6.zip.asc", + } + ], +} + +LOG4J_COLLECTION = { + "uuid": "4c72fe22-9d83-4c2f-8eba-d6db484f32c8", + "version": 3, + "date": "2024-12-13T00:00:00Z", + "belongsTo": "COMPONENT_RELEASE", + "updateReason": {"type": "ARTIFACT_UPDATED", "comment": "VDR file updated"}, + "artifacts": [ + { + "uuid": "1cb47b95-8bf8-3bad-a5a4-0d54d86e10ce", + "name": "Build SBOM", + "type": "BOM", + "formats": [ + { + "mediaType": "application/vnd.cyclonedx+xml", + "description": "CycloneDX SBOM (XML)", + "url": "https://repo.maven.apache.org/maven2/log4j-core-2.24.3-cyclonedx.xml", + "signatureUrl": "https://repo.maven.apache.org/maven2/log4j-core-2.24.3-cyclonedx.xml.asc", + "checksums": [ + {"algType": "MD5", "algValue": "2e1a525afc81b0a8ecff114b8b743de9"}, + {"algType": "SHA-1", "algValue": "5a7d4caef63c5c5ccdf07c39337323529eb5a770"}, + ], + } + ], + }, + { + "uuid": "dfa35519-9734-4259-bba1-3e825cf4be06", + "name": "Vulnerability Disclosure Report", + "type": "VULNERABILITIES", + "formats": [ + { + "mediaType": "application/vnd.cyclonedx+xml", + "description": "CycloneDX VDR (XML)", + "url": "https://logging.apache.org/cyclonedx/vdr.xml", + "checksums": [ + { + "algType": "SHA-256", + "algValue": "75b81020b3917cb682b1a7605ade431e062f7a4c01a412f0b87543b6e995ad2a", + } + ], + } + ], + }, + ], +} + + +class TestSpecExamples: + 
@responses.activate + def test_full_consumer_flow(self): + """Test the full consumer flow: product -> component releases -> collection -> artifacts.""" + product_uuid = LOG4J_PRODUCT["uuid"] + release_uuid = TOMCAT_RELEASE["uuid"] + + responses.get(f"{BASE}/product/{product_uuid}", json=LOG4J_PRODUCT) + responses.get( + f"{BASE}/componentRelease/{release_uuid}", + json={ + "release": TOMCAT_RELEASE, + "latestCollection": LOG4J_COLLECTION, + }, + ) + responses.get(f"{BASE}/componentRelease/{release_uuid}/collection/latest", json=LOG4J_COLLECTION) + + with TeaClient(base_url=BASE) as client: + # Step 1: Get product + product = client.get_product(product_uuid) + assert product.name == "Apache Log4j 2" + assert product.identifiers[0].id_type == IdentifierType.CPE + + # Step 2: Get component release with collection + cr = client.get_component_release(release_uuid) + assert cr.release.version == "11.0.7" + assert cr.release.distributions[0].distribution_type == "zip" + assert cr.release.distributions[0].checksums[0].alg_type == ChecksumAlgorithm.SHA_256 + + # Step 3: Get latest collection + collection = client.get_component_release_collection_latest(release_uuid) + assert collection.version == 3 + assert len(collection.artifacts) == 2 + + # Step 4: Inspect artifacts + sbom = collection.artifacts[0] + assert sbom.type == ArtifactType.BOM + assert sbom.formats[0].media_type == "application/vnd.cyclonedx+xml" + + vdr = collection.artifacts[1] + assert vdr.type == ArtifactType.VULNERABILITIES From d0131aa1b40dca75afc1177320d8136b18512231 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 19:06:44 +0300 Subject: [PATCH 11/17] Enhance README and API client with new features and validations - Updated README to include CI badges, improved usage examples, and clarified TEA API functionality. - Added `search_products` and `search_product_releases` methods to `TeaClient` for searching by identifier with pagination support. 
- Enhanced TEI parsing in `discovery.py` to validate TEI types and domain formats, improving error handling. - Updated `ComponentReleaseWithCollection` model to allow `latest_collection` to be `None`. - Added comprehensive tests for new search functionalities and TEI validation to ensure robustness. --- README.md | 115 ++++++++++++++++++++---------- libtea/_http.py | 8 ++- libtea/client.py | 21 ++++++ libtea/discovery.py | 13 ++++ libtea/models.py | 2 +- tests/test_client.py | 153 ++++++++++++++++++++++++++++++++++++++++ tests/test_discovery.py | 34 +++++++++ tests/test_http.py | 78 +++++++++++++++++++- 8 files changed, 382 insertions(+), 42 deletions(-) diff --git a/README.md b/README.md index 1314fda..402e776 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,14 @@ # libtea +[![CI](https://github.com/sbomify/py-libtea/actions/workflows/ci.yaml/badge.svg)](https://github.com/sbomify/py-libtea/actions/workflows/ci.yaml) +[![PyPI version](https://img.shields.io/pypi/v/libtea.svg)](https://pypi.org/project/libtea/) +[![Python](https://img.shields.io/pypi/pyversions/libtea.svg)](https://pypi.org/project/libtea/) +[![License](https://img.shields.io/github/license/sbomify/py-libtea.svg)](https://github.com/sbomify/py-libtea/blob/master/LICENSE) + Python client library for the [Transparency Exchange API (TEA)](https://transparency.exchange/) v0.3.0-beta.2. +TEA is an open standard for discovering and retrieving software transparency artifacts (SBOMs, VEX, build metadata) for any software product or component. + > **Status**: Alpha — API is subject to change. 
## Installation @@ -15,14 +22,22 @@ pip install libtea ```python from libtea import TeaClient -# Connect directly -client = TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") +# Auto-discover from a domain's .well-known/tea +with TeaClient.from_well_known("example.com", token="your-bearer-token") as client: + # Browse a product + product = client.get_product("product-uuid") + print(product.name) -# Or auto-discover from a domain's .well-known/tea -client = TeaClient.from_well_known("example.com") + # Get a component release with its latest collection + cr = client.get_component_release("release-uuid") + for artifact in cr.latest_collection.artifacts: + print(artifact.name, artifact.type) +``` + +Or connect directly to a known endpoint: -# With authentication -client = TeaClient.from_well_known("example.com", token="your-bearer-token") +```python +client = TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") ``` ## Usage @@ -42,23 +57,25 @@ with TeaClient.from_well_known("example.com") as client: ### Components ```python -component = client.get_component("component-uuid") -releases = client.get_component_releases("component-uuid") +with TeaClient(base_url="https://api.example.com/tea/v1") as client: + component = client.get_component("component-uuid") + releases = client.get_component_releases("component-uuid") -# Get a component release with its latest collection -cr = client.get_component_release("release-uuid") -print(cr.release.version, len(cr.latest_collection.artifacts)) + # Get a component release with its latest collection + cr = client.get_component_release("release-uuid") + print(cr.release.version, len(cr.latest_collection.artifacts)) ``` ### Collections and artifacts ```python -collection = client.get_component_release_collection_latest("release-uuid") -for artifact in collection.artifacts: - print(artifact.name, artifact.type) +with TeaClient(base_url="https://api.example.com/tea/v1") as client: + collection = 
client.get_component_release_collection_latest("release-uuid") + for artifact in collection.artifacts: + print(artifact.name, artifact.type) -# Specific collection version -collection_v3 = client.get_component_release_collection("release-uuid", 3) + # Specific collection version + collection_v3 = client.get_component_release_collection("release-uuid", 3) ``` ### Downloading artifacts with checksum verification @@ -66,23 +83,33 @@ collection_v3 = client.get_component_release_collection("release-uuid", 3) ```python from pathlib import Path -artifact = client.get_artifact("artifact-uuid") -fmt = artifact.formats[0] +with TeaClient(base_url="https://api.example.com/tea/v1") as client: + artifact = client.get_artifact("artifact-uuid") + fmt = artifact.formats[0] -# Downloads and verifies checksums on-the-fly -client.download_artifact( - fmt.url, - Path("sbom.json"), - verify_checksums=fmt.checksums, -) + # Downloads and verifies checksums on-the-fly + client.download_artifact( + fmt.url, + Path("sbom.json"), + verify_checksums=fmt.checksums, + ) ``` -### Discovery via TEI +### Discovery ```python -results = client.discover("urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b") -for info in results: - print(info.product_release_uuid, info.servers) +from libtea.discovery import parse_tei + +# Parse a TEI URN +tei_type, domain, identifier = parse_tei( + "urn:tei:purl:cyclonedx.org:pkg:pypi/cyclonedx-python-lib@8.4.0" +) + +# Discover product releases by TEI +with TeaClient(base_url="https://api.example.com/tea/v1") as client: + results = client.discover("urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b") + for info in results: + print(info.product_release_uuid, info.servers) ``` ## Error handling @@ -90,19 +117,29 @@ for info in results: All exceptions inherit from `TeaError`: ```python -from libtea.exceptions import ( - TeaError, # Base exception - TeaConnectionError, # Network failure or timeout - TeaAuthenticationError,# HTTP 401/403 - 
TeaNotFoundError, # HTTP 404 (has .error_type: "OBJECT_UNKNOWN" or "OBJECT_NOT_SHAREABLE") - TeaRequestError, # HTTP 4xx (other) - TeaServerError, # HTTP 5xx - TeaDiscoveryError, # Invalid TEI, .well-known failure, or no compatible endpoint - TeaChecksumError, # Checksum mismatch (has .algorithm, .expected, .actual) - TeaValidationError, # Malformed server response -) +from libtea.exceptions import TeaError, TeaNotFoundError, TeaChecksumError + +try: + product = client.get_product("unknown-uuid") +except TeaNotFoundError as exc: + print(exc.error_type) # "OBJECT_UNKNOWN" or "OBJECT_NOT_SHAREABLE" +except TeaError: + print("Something went wrong") ``` +Exception hierarchy: + +| Exception | When | +|-----------|------| +| `TeaConnectionError` | Network failure or timeout | +| `TeaAuthenticationError` | HTTP 401/403 | +| `TeaNotFoundError` | HTTP 404 (`.error_type` has the TEA error code) | +| `TeaRequestError` | Other HTTP 4xx | +| `TeaServerError` | HTTP 5xx | +| `TeaDiscoveryError` | Invalid TEI, `.well-known` failure, or no compatible endpoint | +| `TeaChecksumError` | Checksum mismatch (`.algorithm`, `.expected`, `.actual`) | +| `TeaValidationError` | Malformed server response | + ## Requirements - Python >= 3.11 diff --git a/libtea/_http.py b/libtea/_http.py index 526d676..b4ddf6f 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -3,6 +3,7 @@ import hashlib from pathlib import Path from typing import Any +from urllib.parse import urlparse import requests @@ -49,7 +50,12 @@ def __init__( token: str | None = None, timeout: float = 30.0, ): - self._base_url = base_url.rstrip("/") + parsed = urlparse(base_url) + if parsed.scheme not in ("http", "https"): + raise ValueError(f"base_url must use http or https scheme, got {parsed.scheme!r}") + if not parsed.hostname: + raise ValueError(f"base_url must include a hostname: {base_url!r}") + self._base_url = parsed.geturl().rstrip("/") self._timeout = timeout self._session = requests.Session() 
self._session.headers["user-agent"] = USER_AGENT diff --git a/libtea/client.py b/libtea/client.py index 583d43e..aa0b2fb 100644 --- a/libtea/client.py +++ b/libtea/client.py @@ -17,6 +17,7 @@ ComponentReleaseWithCollection, DiscoveryInfo, PaginatedProductReleaseResponse, + PaginatedProductResponse, Product, ProductRelease, Release, @@ -77,6 +78,16 @@ def discover(self, tei: str) -> list[DiscoveryInfo]: # --- Products --- + def search_products( + self, id_type: str, id_value: str, *, page_offset: int = 0, page_size: int = 100 + ) -> PaginatedProductResponse: + """Search for products by identifier (e.g. PURL, CPE, TEI).""" + data = self._http.get_json( + "/products", + params={"idType": id_type, "idValue": id_value, "pageOffset": page_offset, "pageSize": page_size}, + ) + return _validate(PaginatedProductResponse, data) + def get_product(self, uuid: str) -> Product: data = self._http.get_json(f"/product/{uuid}") return _validate(Product, data) @@ -92,6 +103,16 @@ def get_product_releases( # --- Product Releases --- + def search_product_releases( + self, id_type: str, id_value: str, *, page_offset: int = 0, page_size: int = 100 + ) -> PaginatedProductReleaseResponse: + """Search for product releases by identifier (e.g. 
PURL, CPE, TEI).""" + data = self._http.get_json( + "/productReleases", + params={"idType": id_type, "idValue": id_value, "pageOffset": page_offset, "pageSize": page_size}, + ) + return _validate(PaginatedProductReleaseResponse, data) + def get_product_release(self, uuid: str) -> ProductRelease: data = self._http.get_json(f"/productRelease/{uuid}") return _validate(ProductRelease, data) diff --git a/libtea/discovery.py b/libtea/discovery.py index 483f186..16bd399 100644 --- a/libtea/discovery.py +++ b/libtea/discovery.py @@ -1,10 +1,17 @@ """TEI parsing, .well-known/tea fetching, and endpoint selection.""" +import re + import requests from libtea.exceptions import TeaDiscoveryError from libtea.models import TeaEndpoint, TeaWellKnown +_VALID_TEI_TYPES = frozenset({"uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"}) +_DOMAIN_RE = re.compile( + r"^[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?)*$" +) + def parse_tei(tei: str) -> tuple[str, str, str]: """Parse a TEI URN into (type, domain, identifier). @@ -17,7 +24,13 @@ def parse_tei(tei: str) -> tuple[str, str, str]: raise TeaDiscoveryError(f"Invalid TEI: {tei!r}. Expected format: urn:tei:::") tei_type = parts[2] + if tei_type not in _VALID_TEI_TYPES: + raise TeaDiscoveryError( + f"Invalid TEI type: {tei_type!r}. 
Must be one of: {', '.join(sorted(_VALID_TEI_TYPES))}" + ) domain = parts[3] + if not domain or not _DOMAIN_RE.match(domain): + raise TeaDiscoveryError(f"Invalid domain in TEI: {domain!r}") identifier = ":".join(parts[4:]) return tei_type, domain, identifier diff --git a/libtea/models.py b/libtea/models.py index 1e7686c..bf29ca6 100644 --- a/libtea/models.py +++ b/libtea/models.py @@ -167,7 +167,7 @@ class Release(_TeaModel): class ComponentReleaseWithCollection(_TeaModel): release: Release - latest_collection: Collection + latest_collection: Collection | None = None class Product(_TeaModel): diff --git a/tests/test_client.py b/tests/test_client.py index 5407d56..0bdc9af 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -8,6 +8,7 @@ Component, ComponentReleaseWithCollection, PaginatedProductReleaseResponse, + PaginatedProductResponse, Product, ProductRelease, Release, @@ -15,6 +16,96 @@ from tests.conftest import BASE_URL as BASE +class TestSearchProducts: + @responses.activate + def test_search_products_by_purl(self, client): + responses.get( + f"{BASE}/products", + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 0, + "pageSize": 100, + "totalResults": 1, + "results": [ + { + "uuid": "abc-123", + "name": "Test Product", + "identifiers": [{"idType": "PURL", "idValue": "pkg:pypi/foo"}], + }, + ], + }, + ) + resp = client.search_products("PURL", "pkg:pypi/foo") + assert isinstance(resp, PaginatedProductResponse) + assert resp.total_results == 1 + assert resp.results[0].name == "Test Product" + request = responses.calls[0].request + assert "idType=PURL" in str(request.url) + assert "idValue=pkg" in str(request.url) + + @responses.activate + def test_search_products_pagination(self, client): + responses.get( + f"{BASE}/products", + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 10, + "pageSize": 25, + "totalResults": 50, + "results": [], + }, + ) + resp = client.search_products("CPE", "cpe:2.3:a:vendor:product", 
page_offset=10, page_size=25) + request = responses.calls[0].request + assert "pageOffset=10" in str(request.url) + assert "pageSize=25" in str(request.url) + assert resp.page_start_index == 10 + + @responses.activate + def test_search_products_empty(self, client): + responses.get( + f"{BASE}/products", + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 0, + "pageSize": 100, + "totalResults": 0, + "results": [], + }, + ) + resp = client.search_products("PURL", "pkg:pypi/nonexistent") + assert resp.total_results == 0 + assert resp.results == [] + + +class TestSearchProductReleases: + @responses.activate + def test_search_product_releases_by_purl(self, client): + responses.get( + f"{BASE}/productReleases", + json={ + "timestamp": "2024-03-20T15:30:00Z", + "pageStartIndex": 0, + "pageSize": 100, + "totalResults": 1, + "results": [ + { + "uuid": "rel-1", + "version": "1.0.0", + "createdDate": "2024-01-01T00:00:00Z", + "components": [{"uuid": "comp-1"}], + } + ], + }, + ) + resp = client.search_product_releases("PURL", "pkg:pypi/foo@1.0.0") + assert isinstance(resp, PaginatedProductReleaseResponse) + assert resp.total_results == 1 + assert resp.results[0].version == "1.0.0" + request = responses.calls[0].request + assert "idType=PURL" in str(request.url) + + class TestProduct: @responses.activate def test_get_product(self, client): @@ -125,6 +216,21 @@ def test_get_component_release(self, client): result = client.get_component_release("cr-1") assert isinstance(result, ComponentReleaseWithCollection) assert result.release.version == "1.0.0" + assert result.latest_collection is not None + + @responses.activate + def test_get_component_release_without_collection(self, client): + responses.get( + f"{BASE}/componentRelease/cr-2", + json={ + "release": {"uuid": "cr-2", "version": "2.0.0", "createdDate": "2024-01-01T00:00:00Z"}, + "latestCollection": None, + }, + ) + result = client.get_component_release("cr-2") + assert isinstance(result, 
ComponentReleaseWithCollection) + assert result.release.version == "2.0.0" + assert result.latest_collection is None @responses.activate def test_get_component_release_collection_latest(self, client): @@ -275,6 +381,53 @@ def test_get_product_releases_pagination_params(self, client): assert resp.page_start_index == 50 +class TestProductReleaseCollections: + @responses.activate + def test_get_product_release_collections(self, client): + responses.get( + f"{BASE}/productRelease/rel-1/collections", + json=[ + {"uuid": "rel-1", "version": 1, "artifacts": []}, + {"uuid": "rel-1", "version": 2, "artifacts": []}, + ], + ) + collections = client.get_product_release_collections("rel-1") + assert len(collections) == 2 + assert collections[0].version == 1 + + @responses.activate + def test_get_product_release_collection_by_version(self, client): + responses.get( + f"{BASE}/productRelease/rel-1/collection/5", + json={"uuid": "rel-1", "version": 5, "artifacts": []}, + ) + collection = client.get_product_release_collection("rel-1", 5) + assert collection.version == 5 + + +class TestValidationErrors: + @responses.activate + def test_validate_raises_tea_validation_error(self, client): + from libtea.exceptions import TeaValidationError + + # Missing required fields triggers Pydantic ValidationError → TeaValidationError + responses.get(f"{BASE}/product/abc", json={"bad": "data"}) + with pytest.raises(TeaValidationError, match="Invalid Product response"): + client.get_product("abc") + + @responses.activate + def test_validate_list_raises_tea_validation_error(self, client): + from libtea.exceptions import TeaValidationError + + # List with invalid items triggers Pydantic ValidationError → TeaValidationError + responses.get( + f"{BASE}/component/comp-1/releases", + json=[{"bad": "data"}], + ) + with pytest.raises(TeaValidationError, match="Invalid Release response"): + client.get_component_releases("comp-1") + + class TestContextManager: @responses.activate def 
test_client_as_context_manager(self): diff --git a/tests/test_discovery.py b/tests/test_discovery.py index 28f7838..180b1e5 100644 --- a/tests/test_discovery.py +++ b/tests/test_discovery.py @@ -45,6 +45,34 @@ def test_invalid_tei_empty_string(self): with pytest.raises(TeaDiscoveryError, match="Invalid TEI"): parse_tei("") + def test_invalid_tei_unknown_type(self): + with pytest.raises(TeaDiscoveryError, match="Invalid TEI type"): + parse_tei("urn:tei:unknown:example.com:some-id") + + def test_all_valid_tei_types(self): + for tei_type in ("uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"): + _, _, _ = parse_tei(f"urn:tei:{tei_type}:example.com:some-id") + + def test_invalid_tei_empty_domain(self): + with pytest.raises(TeaDiscoveryError, match="Invalid domain"): + parse_tei("urn:tei:uuid::some-id") + + def test_invalid_tei_bad_domain_format(self): + with pytest.raises(TeaDiscoveryError, match="Invalid domain"): + parse_tei("urn:tei:uuid:-invalid.com:some-id") + + def test_invalid_tei_domain_with_underscore(self): + with pytest.raises(TeaDiscoveryError, match="Invalid domain"): + parse_tei("urn:tei:uuid:bad_domain.com:some-id") + + def test_valid_tei_subdomain(self): + _, domain, _ = parse_tei("urn:tei:uuid:products.tea.example.com:some-id") + assert domain == "products.tea.example.com" + + def test_valid_tei_single_label_domain(self): + _, domain, _ = parse_tei("urn:tei:uuid:localhost:some-id") + assert domain == "localhost" + def test_tei_with_slash_in_purl_identifier(self): tei = "urn:tei:purl:cyclonedx.org:pkg:maven/org.apache/log4j@2.24.3" tei_type, domain, identifier = parse_tei(tei) @@ -79,6 +107,12 @@ def test_fetch_well_known_connection_error(self): with pytest.raises(TeaDiscoveryError, match="Failed to connect"): fetch_well_known("example.com") + @responses.activate + def test_fetch_well_known_timeout_error(self): + responses.get("https://example.com/.well-known/tea", body=requests.Timeout("timed out")) + with pytest.raises(TeaDiscoveryError, 
match="Failed to connect"): + fetch_well_known("example.com") + @responses.activate def test_fetch_well_known_500_raises_discovery_error(self): responses.get("https://example.com/.well-known/tea", status=500) diff --git a/tests/test_http.py b/tests/test_http.py index 35f8e74..b24fc41 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -1,8 +1,11 @@ +import hashlib +from unittest.mock import patch + import pytest import requests import responses -from libtea._http import TeaHttpClient +from libtea._http import TeaHttpClient, _get_package_version from libtea.exceptions import ( TeaAuthenticationError, TeaConnectionError, @@ -130,3 +133,76 @@ def test_user_agent_includes_version(self, base_url): assert ua.startswith("py-libtea/") assert "hello@sbomify.com" in ua client.close() + + @responses.activate + def test_context_manager(self, base_url): + responses.get(f"{base_url}/product/abc", json={"uuid": "abc"}) + with TeaHttpClient(base_url=base_url) as client: + data = client.get_json("/product/abc") + assert data["uuid"] == "abc" + + @responses.activate + def test_download_blake2b_256(self, http_client, tmp_path): + content = b"blake2b test content" + responses.get("https://artifacts.example.com/file.bin", body=content) + dest = tmp_path / "file.bin" + digests = http_client.download_with_hashes( + url="https://artifacts.example.com/file.bin", + dest=dest, + algorithms=["BLAKE2b-256"], + ) + expected = hashlib.blake2b(content, digest_size=32).hexdigest() + assert digests["BLAKE2b-256"] == expected + + @responses.activate + def test_download_generic_exception_cleans_up(self, http_client, tmp_path): + responses.get("https://artifacts.example.com/file.bin", status=500) + dest = tmp_path / "file.bin" + with pytest.raises(TeaServerError): + http_client.download_with_hashes(url="https://artifacts.example.com/file.bin", dest=dest) + assert not dest.exists() + + +class TestBaseUrlValidation: + def test_rejects_ftp_scheme(self): + with pytest.raises(ValueError, 
match="http or https scheme"): + TeaHttpClient(base_url="ftp://example.com/api") + + def test_rejects_empty_scheme(self): + with pytest.raises(ValueError, match="http or https scheme"): + TeaHttpClient(base_url="example.com/api") + + def test_rejects_missing_hostname(self): + with pytest.raises(ValueError, match="must include a hostname"): + TeaHttpClient(base_url="http:///path/only") + + def test_accepts_http(self): + client = TeaHttpClient(base_url="http://example.com/api") + assert client._base_url == "http://example.com/api" + client.close() + + def test_accepts_https(self): + client = TeaHttpClient(base_url="https://example.com/api") + assert client._base_url == "https://example.com/api" + client.close() + + def test_strips_trailing_slash(self): + client = TeaHttpClient(base_url="https://example.com/api/") + assert client._base_url == "https://example.com/api" + client.close() + + +class TestGetPackageVersion: + def test_fallback_to_tomllib(self): + with patch("importlib.metadata.version", side_effect=Exception("not installed")): + result = _get_package_version() + # Falls back to tomllib parsing of pyproject.toml + assert isinstance(result, str) + + def test_fallback_to_unknown(self): + with ( + patch("importlib.metadata.version", side_effect=Exception("not installed")), + patch("tomllib.load", side_effect=Exception("parse error")), + ): + result = _get_package_version() + assert result == "unknown" From 48bbe327e7e366a18f167bed9840049887702b1a Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 21:02:17 +0300 Subject: [PATCH 12/17] Update README, enhance discovery functionality, and add UDI support - Expanded README to include detailed features, usage examples, and a new section on unsupported features. - Modified `fetch_well_known` in `discovery.py` to include a user-agent header for requests. - Added UDI (Unique Device Identifier) to `IdentifierType` in `models.py`. 
- Implemented tests to verify user-agent functionality and UDI identifier handling in products. --- README.md | 35 ++++++++++++++++++++++++++++++++++- libtea/discovery.py | 4 +++- libtea/models.py | 1 + tests/test_discovery.py | 14 ++++++++++++++ tests/test_models.py | 11 +++++++++++ 5 files changed, 63 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 402e776..e4f9c5d 100644 --- a/README.md +++ b/README.md @@ -7,10 +7,21 @@ Python client library for the [Transparency Exchange API (TEA)](https://transparency.exchange/) v0.3.0-beta.2. -TEA is an open standard for discovering and retrieving software transparency artifacts (SBOMs, VEX, build metadata) for any software product or component. +TEA is an open standard for discovering and retrieving software transparency artifacts (SBOMs, VEX, build metadata) for any software product or component. A [TEI identifier](https://github.com/CycloneDX/transparency-exchange-api/blob/main/discovery/readme.md) resolves via DNS to the right endpoint, similar to how email uses MX records — so consumers can fetch artifacts without knowing which server hosts them. + +**Specification:** [Ecma TC54-TG1](https://tc54.org/tea/) | [OpenAPI spec](https://github.com/CycloneDX/transparency-exchange-api) > **Status**: Alpha — API is subject to change. 
+### Features + +- Auto-discovery via `.well-known/tea` and TEI URNs +- Products, components, releases, and versioned collections +- Search by PURL, CPE, or TEI identifier +- Artifact download with on-the-fly checksum verification +- Typed Pydantic v2 models with full camelCase/snake_case conversion +- Structured exception hierarchy with error context + ## Installation ```bash @@ -42,6 +53,20 @@ client = TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") ## Usage +### Search + +```python +with TeaClient.from_well_known("example.com") as client: + # Search by PURL + results = client.search_products("PURL", "pkg:pypi/requests") + for product in results.results: + print(product.name, product.uuid) + + # Search product releases + releases = client.search_product_releases("PURL", "pkg:pypi/requests@2.31.0") + print(releases.total_results) +``` + ### Products and releases ```python @@ -146,6 +171,14 @@ Exception hierarchy: - [requests](https://requests.readthedocs.io/) for HTTP - [Pydantic](https://docs.pydantic.dev/) v2 for data models +## Not yet supported + +- Publisher API (spec is consumer-only in beta.2) +- Async client +- CLE (Common Lifecycle Enumeration) endpoints +- Mutual TLS (mTLS) authentication +- Endpoint failover with retry + ## Development This project uses [uv](https://docs.astral.sh/uv/) for dependency management. 
diff --git a/libtea/discovery.py b/libtea/discovery.py index 16bd399..414bd10 100644 --- a/libtea/discovery.py +++ b/libtea/discovery.py @@ -37,9 +37,11 @@ def parse_tei(tei: str) -> tuple[str, str, str]: def fetch_well_known(domain: str, *, timeout: float = 10.0) -> TeaWellKnown: """Fetch and parse the .well-known/tea document from a domain via HTTPS.""" + from libtea._http import USER_AGENT + url = f"https://{domain}/.well-known/tea" try: - response = requests.get(url, timeout=timeout, allow_redirects=True) + response = requests.get(url, timeout=timeout, allow_redirects=True, headers={"user-agent": USER_AGENT}) if response.status_code >= 400: raise TeaDiscoveryError(f"Failed to fetch {url}: HTTP {response.status_code}") except requests.ConnectionError as exc: diff --git a/libtea/models.py b/libtea/models.py index bf29ca6..3c789b4 100644 --- a/libtea/models.py +++ b/libtea/models.py @@ -24,6 +24,7 @@ class IdentifierType(StrEnum): CPE = "CPE" TEI = "TEI" PURL = "PURL" + UDI = "UDI" class ChecksumAlgorithm(StrEnum): diff --git a/tests/test_discovery.py b/tests/test_discovery.py index 180b1e5..cd51f2a 100644 --- a/tests/test_discovery.py +++ b/tests/test_discovery.py @@ -95,6 +95,20 @@ def test_fetch_well_known_success(self): assert wk.schema_version == 1 assert len(wk.endpoints) == 1 + @responses.activate + def test_fetch_well_known_sends_user_agent(self): + responses.get( + "https://example.com/.well-known/tea", + json={ + "schemaVersion": 1, + "endpoints": [{"url": "https://api.example.com", "versions": ["1.0.0"]}], + }, + ) + fetch_well_known("example.com") + ua = responses.calls[0].request.headers["user-agent"] + assert ua.startswith("py-libtea/") + assert "hello@sbomify.com" in ua + @responses.activate def test_fetch_well_known_404_raises_discovery_error(self): responses.get("https://example.com/.well-known/tea", status=404) diff --git a/tests/test_models.py b/tests/test_models.py index f8ee571..52517fc 100644 --- a/tests/test_models.py +++ 
b/tests/test_models.py @@ -23,6 +23,7 @@ def test_identifier_type_values(self): assert IdentifierType.CPE == "CPE" assert IdentifierType.TEI == "TEI" assert IdentifierType.PURL == "PURL" + assert IdentifierType.UDI == "UDI" def test_checksum_algorithm_values(self): assert ChecksumAlgorithm.SHA_256 == "SHA-256" @@ -196,6 +197,16 @@ def test_product_from_json(self): assert len(product.identifiers) == 2 assert product.identifiers[0].id_type == IdentifierType.CPE + def test_product_with_udi_identifier(self): + data = { + "uuid": "abc-123", + "name": "Medical Device", + "identifiers": [{"idType": "UDI", "idValue": "00123456789012"}], + } + product = Product.model_validate(data) + assert product.identifiers[0].id_type == IdentifierType.UDI + assert product.identifiers[0].id_value == "00123456789012" + class TestRelease: def test_release_from_json(self): From 77d1e69466b1bd18d38c775b88e7898dd4d79702 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Wed, 25 Feb 2026 23:06:04 +0300 Subject: [PATCH 13/17] Enhance CI workflows, update dependencies, and improve error handling - Updated CI workflows to use specific versions for actions, ensuring stability. - Added `--cov-branch` option to pytest for improved coverage reporting. - Enhanced error handling in the HTTP client to log connection issues and timeouts. - Introduced new validation for URL path segments in the API client to prevent injection attacks. - Updated models to enforce constraints on endpoint versions and priorities. - Added comprehensive tests for new validation logic and error handling improvements. 
--- .github/workflows/ci.yaml | 4 +- .github/workflows/codeql.yaml | 8 +- .github/workflows/pypi.yaml | 12 +- libtea/__init__.py | 78 +++++++++++- libtea/_http.py | 162 ++++++++++++++++-------- libtea/client.py | 111 ++++++++++------- libtea/discovery.py | 135 ++++++++++++++++++-- libtea/exceptions.py | 6 +- libtea/models.py | 24 ++-- pyproject.toml | 2 +- tests/test_client.py | 151 +++++++++++++---------- tests/test_discovery.py | 159 ++++++++++++++++++++++-- tests/test_download.py | 34 ++++- tests/test_http.py | 225 ++++++++++++++++++++++++++++++---- tests/test_integration.py | 11 +- 15 files changed, 884 insertions(+), 238 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f202480..4a50191 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -12,8 +12,8 @@ jobs: matrix: python-version: ["3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v4 - - uses: astral-sh/setup-uv@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1 - run: uv python install ${{ matrix.python-version }} - run: uv sync - run: uv run ruff check . 
diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml index 8e68a06..3201701 100644 --- a/.github/workflows/codeql.yaml +++ b/.github/workflows/codeql.yaml @@ -16,15 +16,15 @@ jobs: matrix: language: [python] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 with: languages: ${{ matrix.language }} - name: Autobuild - uses: github/codeql-action/autobuild@v3 + uses: github/codeql-action/autobuild@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 diff --git a/.github/workflows/pypi.yaml b/.github/workflows/pypi.yaml index bf3e3c7..87be0b8 100644 --- a/.github/workflows/pypi.yaml +++ b/.github/workflows/pypi.yaml @@ -17,9 +17,9 @@ jobs: permissions: id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: astral-sh/setup-uv@v5 + - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1 - name: Determine version id: version @@ -37,7 +37,7 @@ jobs: run: uv build - name: Publish to TestPyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@76f52bc884231f62b54f72e44af3222236a5b286 # release/v1 with: repository-url: https://test.pypi.org/legacy/ @@ -50,12 +50,12 @@ jobs: permissions: id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: astral-sh/setup-uv@v5 + - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1 - name: Build package run: uv build - name: Publish to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: 
pypa/gh-action-pypi-publish@76f52bc884231f62b54f72e44af3222236a5b286 # release/v1 diff --git a/libtea/__init__.py b/libtea/__init__.py index ddcf4c2..96b122d 100644 --- a/libtea/__init__.py +++ b/libtea/__init__.py @@ -2,7 +2,81 @@ from importlib.metadata import version -from libtea.client import TeaClient +from libtea.client import TEA_SPEC_VERSION, TeaClient +from libtea.exceptions import ( + TeaAuthenticationError, + TeaChecksumError, + TeaConnectionError, + TeaDiscoveryError, + TeaError, + TeaInsecureTransportWarning, + TeaNotFoundError, + TeaRequestError, + TeaServerError, + TeaValidationError, +) +from libtea.models import ( + Artifact, + ArtifactFormat, + ArtifactType, + Checksum, + ChecksumAlgorithm, + Collection, + CollectionBelongsTo, + CollectionUpdateReason, + CollectionUpdateReasonType, + Component, + ComponentRef, + ComponentReleaseWithCollection, + DiscoveryInfo, + ErrorType, + Identifier, + IdentifierType, + PaginatedProductReleaseResponse, + PaginatedProductResponse, + Product, + ProductRelease, + Release, + ReleaseDistribution, + TeaServerInfo, +) __version__ = version("libtea") -__all__ = ["TeaClient", "__version__"] +__all__ = [ + "TEA_SPEC_VERSION", + "TeaClient", + "TeaError", + "TeaAuthenticationError", + "TeaChecksumError", + "TeaConnectionError", + "TeaDiscoveryError", + "TeaInsecureTransportWarning", + "TeaNotFoundError", + "TeaRequestError", + "TeaServerError", + "TeaValidationError", + "Artifact", + "ArtifactFormat", + "ArtifactType", + "Checksum", + "ChecksumAlgorithm", + "Collection", + "CollectionBelongsTo", + "CollectionUpdateReason", + "CollectionUpdateReasonType", + "Component", + "ComponentRef", + "ComponentReleaseWithCollection", + "DiscoveryInfo", + "ErrorType", + "Identifier", + "IdentifierType", + "PaginatedProductReleaseResponse", + "PaginatedProductResponse", + "Product", + "ProductRelease", + "Release", + "ReleaseDistribution", + "TeaServerInfo", + "__version__", +] diff --git a/libtea/_http.py b/libtea/_http.py index 
b4ddf6f..00779d1 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -1,44 +1,94 @@ """Internal HTTP client wrapping requests with TEA error handling.""" import hashlib +import logging +import warnings from pathlib import Path -from typing import Any +from types import TracebackType +from typing import Any, Self from urllib.parse import urlparse import requests from libtea.exceptions import ( TeaAuthenticationError, + TeaChecksumError, TeaConnectionError, + TeaInsecureTransportWarning, TeaNotFoundError, TeaRequestError, TeaServerError, TeaValidationError, ) +logger = logging.getLogger("libtea") + +# Hash algorithm registry: {TEA name: (hashlib name, digest_size)}. +# When digest_size is None, hashlib.new(name) is used with its default size. +# When digest_size is set, hashlib.blake2b(digest_size=N) is used instead. +# BLAKE3 is intentionally excluded — handled separately in _build_hashers. +_HASH_REGISTRY: dict[str, tuple[str, int | None]] = { + "MD5": ("md5", None), + "SHA-1": ("sha1", None), + "SHA-256": ("sha256", None), + "SHA-384": ("sha384", None), + "SHA-512": ("sha512", None), + "SHA3-256": ("sha3_256", None), + "SHA3-384": ("sha3_384", None), + "SHA3-512": ("sha3_512", None), + "BLAKE2b-256": ("blake2b", 32), + "BLAKE2b-384": ("blake2b", 48), + "BLAKE2b-512": ("blake2b", 64), +} + def _get_package_version() -> str: """Get the package version for User-Agent header.""" try: - from importlib.metadata import version + from importlib.metadata import PackageNotFoundError, version return version("libtea") - except Exception: - try: - import tomllib - - pyproject_path = Path(__file__).parent.parent / "pyproject.toml" - if pyproject_path.exists(): - with open(pyproject_path, "rb") as f: - pyproject_data = tomllib.load(f) - return pyproject_data.get("project", {}).get("version", "unknown") - except Exception: - pass + except (PackageNotFoundError, ValueError): return "unknown" USER_AGENT = f"py-libtea/{_get_package_version()} (hello@sbomify.com)" +_BLOCKED_SCHEMES 
= frozenset({"file", "ftp", "gopher", "data"}) + + +def _build_hashers(algorithms: list[str]) -> dict[str, Any]: + """Build hashlib hasher objects for the given algorithm names.""" + hashers: dict[str, Any] = {} + for alg in algorithms: + if alg == "BLAKE3": + raise TeaChecksumError( + "BLAKE3 is not supported by Python's hashlib. " + "Install the 'blake3' package or use a different algorithm.", + algorithm="BLAKE3", + ) + entry = _HASH_REGISTRY.get(alg) + if entry is None: + raise TeaChecksumError( + f"Unsupported checksum algorithm: {alg!r}. Supported: {', '.join(sorted(_HASH_REGISTRY.keys()))}", + algorithm=alg, + ) + hashlib_name, digest_size = entry + if digest_size is not None: + hashers[alg] = hashlib.blake2b(digest_size=digest_size) + else: + hashers[alg] = hashlib.new(hashlib_name) + return hashers + + +def _validate_download_url(url: str) -> None: + """Reject download URLs that use non-HTTP schemes.""" + parsed = urlparse(url) + if parsed.scheme in _BLOCKED_SCHEMES or parsed.scheme not in ("http", "https"): + raise TeaValidationError(f"Artifact download URL must use http or https scheme, got {parsed.scheme!r}") + if not parsed.hostname: + raise TeaValidationError(f"Artifact download URL must include a hostname: {url!r}") + class TeaHttpClient: """Low-level HTTP client for TEA API requests.""" @@ -55,6 +105,14 @@ def __init__( raise ValueError(f"base_url must use http or https scheme, got {parsed.scheme!r}") if not parsed.hostname: raise ValueError(f"base_url must include a hostname: {base_url!r}") + if parsed.scheme == "http" and token: + raise ValueError("Cannot use bearer token with plaintext HTTP. Use https:// or remove the token.") + if parsed.scheme == "http": + warnings.warn( + "Using plaintext HTTP is insecure. 
Use HTTPS in production.", + TeaInsecureTransportWarning, + stacklevel=2, + ) self._base_url = parsed.geturl().rstrip("/") self._timeout = timeout self._session = requests.Session() @@ -66,10 +124,15 @@ def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: """Send GET request and return parsed JSON.""" url = f"{self._base_url}{path}" try: - response = self._session.get(url, params=params, timeout=self._timeout) + response = self._session.get(url, params=params, timeout=self._timeout, allow_redirects=False) except requests.ConnectionError as exc: + logger.warning("Connection error for %s: %s", url, exc) raise TeaConnectionError(str(exc)) from exc except requests.Timeout as exc: + logger.warning("Timeout for %s: %s", url, exc) + raise TeaConnectionError(str(exc)) from exc + except requests.RequestException as exc: + logger.warning("Request error for %s: %s", url, exc) raise TeaConnectionError(str(exc)) from exc self._raise_for_status(response) @@ -84,36 +147,8 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non Uses a separate unauthenticated session so that the bearer token is not leaked to third-party artifact hosts (CDNs, Maven Central, etc.). """ - from libtea.exceptions import TeaChecksumError - - hashers: dict[str, Any] = {} - if algorithms: - alg_map = { - "MD5": "md5", - "SHA-1": "sha1", - "SHA-256": "sha256", - "SHA-384": "sha384", - "SHA-512": "sha512", - "SHA3-256": "sha3_256", - "SHA3-384": "sha3_384", - "SHA3-512": "sha3_512", - "BLAKE2b-256": "blake2b", - "BLAKE2b-384": "blake2b", - "BLAKE2b-512": "blake2b", - } - blake2b_sizes = {"BLAKE2b-256": 32, "BLAKE2b-384": 48, "BLAKE2b-512": 64} - for alg in algorithms: - if alg == "BLAKE3": - raise TeaChecksumError( - "BLAKE3 is not supported by Python's hashlib. 
" - "Install the 'blake3' package or use a different algorithm.", - algorithm="BLAKE3", - ) - hashlib_name = alg_map.get(alg) - if hashlib_name == "blake2b": - hashers[alg] = hashlib.blake2b(digest_size=blake2b_sizes[alg]) - elif hashlib_name: - hashers[alg] = hashlib.new(hashlib_name) + _validate_download_url(url) + hashers = _build_hashers(algorithms) if algorithms else {} dest.parent.mkdir(parents=True, exist_ok=True) try: @@ -129,19 +164,31 @@ def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | Non except (requests.ConnectionError, requests.Timeout) as exc: dest.unlink(missing_ok=True) raise TeaConnectionError(str(exc)) from exc - except Exception: + except requests.RequestException as exc: dest.unlink(missing_ok=True) + raise TeaConnectionError(f"Download failed: {exc}") from exc + except Exception: + try: + dest.unlink(missing_ok=True) + except OSError: + logger.warning("Failed to clean up partial download at %s", dest) raise return {alg: h.hexdigest() for alg, h in hashers.items()} def close(self) -> None: + self._session.headers.pop("authorization", None) self._session.close() - def __enter__(self): + def __enter__(self) -> Self: return self - def __exit__(self, *args): + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: self.close() @staticmethod @@ -150,18 +197,25 @@ def _raise_for_status(response: requests.Response) -> None: status = response.status_code if 200 <= status < 300: return - + if 300 <= status < 400: + raise TeaRequestError(f"Unexpected redirect: HTTP {status}") if status in (401, 403): + logger.warning("Authentication failed: HTTP %d for %s", status, response.url) raise TeaAuthenticationError(f"Authentication failed: HTTP {status}") - elif status == 404: + if status == 404: error_type = None try: body = response.json() - error_type = body.get("error") - except Exception: + if isinstance(body, dict): + error_type = 
body.get("error") + except ValueError: pass raise TeaNotFoundError(f"Not found: HTTP {status}", error_type=error_type) - elif 400 <= status < 500: - raise TeaRequestError(f"Client error: HTTP {status}") - elif status >= 500: + if status >= 500: raise TeaServerError(f"Server error: HTTP {status}") + # Remaining 4xx codes (400, 405-499 excluding 401/403/404) + body_text = response.text[:200] if response.text else "" + msg = f"Client error: HTTP {status}" + if body_text: + msg = f"{msg} — {body_text}" + raise TeaRequestError(msg) diff --git a/libtea/client.py b/libtea/client.py index aa0b2fb..02503db 100644 --- a/libtea/client.py +++ b/libtea/client.py @@ -1,14 +1,17 @@ """TeaClient - main entry point for the TEA consumer API.""" +import hmac +import logging +import re from pathlib import Path from types import TracebackType -from typing import Self +from typing import Any, Self, TypeVar -from pydantic import ValidationError +from pydantic import BaseModel, ValidationError from libtea._http import TeaHttpClient from libtea.discovery import fetch_well_known, select_endpoint -from libtea.exceptions import TeaValidationError +from libtea.exceptions import TeaChecksumError, TeaValidationError from libtea.models import ( Artifact, Checksum, @@ -23,10 +26,17 @@ Release, ) +logger = logging.getLogger("libtea") + TEA_SPEC_VERSION = "0.3.0-beta.2" +_M = TypeVar("_M", bound=BaseModel) + +# Restrict URL path segments to safe characters to prevent path traversal and injection. 
+_SAFE_PATH_SEGMENT_RE = re.compile(r"^[a-zA-Z0-9\-]{1,128}$") -def _validate(model_cls, data): + +def _validate(model_cls: type[_M], data: Any) -> _M: """Validate data against a Pydantic model, wrapping errors in TeaValidationError.""" try: return model_cls.model_validate(data) @@ -34,7 +44,7 @@ def _validate(model_cls, data): raise TeaValidationError(f"Invalid {model_cls.__name__} response: {exc}") from exc -def _validate_list(model_cls, data): +def _validate_list(model_cls: type[_M], data: list[Any]) -> list[_M]: """Validate a list of items against a Pydantic model.""" try: return [model_cls.model_validate(item) for item in data] @@ -42,6 +52,15 @@ def _validate_list(model_cls, data): raise TeaValidationError(f"Invalid {model_cls.__name__} response: {exc}") from exc +def _validate_path_segment(value: str, name: str = "uuid") -> str: + """Validate that a value is safe to interpolate into a URL path.""" + if not _SAFE_PATH_SEGMENT_RE.match(value): + raise TeaValidationError( + f"Invalid {name}: {value!r}. Must contain only alphanumeric characters and hyphens, max 128 characters." 
+ ) + return value + + class TeaClient: """Synchronous client for the Transparency Exchange API.""" @@ -62,7 +81,7 @@ def from_well_known( token: str | None = None, timeout: float = 30.0, version: str = TEA_SPEC_VERSION, - ) -> "TeaClient": + ) -> Self: """Create a client by discovering the TEA endpoint from a domain's .well-known/tea.""" well_known = fetch_well_known(domain, timeout=timeout) endpoint = select_endpoint(well_known, version) @@ -89,14 +108,14 @@ def search_products( return _validate(PaginatedProductResponse, data) def get_product(self, uuid: str) -> Product: - data = self._http.get_json(f"/product/{uuid}") + data = self._http.get_json(f"/product/{_validate_path_segment(uuid)}") return _validate(Product, data) def get_product_releases( self, uuid: str, *, page_offset: int = 0, page_size: int = 100 ) -> PaginatedProductReleaseResponse: data = self._http.get_json( - f"/product/{uuid}/releases", + f"/product/{_validate_path_segment(uuid)}/releases", params={"pageOffset": page_offset, "pageSize": page_size}, ) return _validate(PaginatedProductReleaseResponse, data) @@ -114,53 +133,53 @@ def search_product_releases( return _validate(PaginatedProductReleaseResponse, data) def get_product_release(self, uuid: str) -> ProductRelease: - data = self._http.get_json(f"/productRelease/{uuid}") + data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}") return _validate(ProductRelease, data) def get_product_release_collection_latest(self, uuid: str) -> Collection: - data = self._http.get_json(f"/productRelease/{uuid}/collection/latest") + data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}/collection/latest") return _validate(Collection, data) def get_product_release_collections(self, uuid: str) -> list[Collection]: - data = self._http.get_json(f"/productRelease/{uuid}/collections") + data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}/collections") return _validate_list(Collection, data) def 
get_product_release_collection(self, uuid: str, version: int) -> Collection: - data = self._http.get_json(f"/productRelease/{uuid}/collection/{version}") + data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}/collection/{version}") return _validate(Collection, data) # --- Components --- def get_component(self, uuid: str) -> Component: - data = self._http.get_json(f"/component/{uuid}") + data = self._http.get_json(f"/component/{_validate_path_segment(uuid)}") return _validate(Component, data) def get_component_releases(self, uuid: str) -> list[Release]: - data = self._http.get_json(f"/component/{uuid}/releases") + data = self._http.get_json(f"/component/{_validate_path_segment(uuid)}/releases") return _validate_list(Release, data) # --- Component Releases --- def get_component_release(self, uuid: str) -> ComponentReleaseWithCollection: - data = self._http.get_json(f"/componentRelease/{uuid}") + data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}") return _validate(ComponentReleaseWithCollection, data) def get_component_release_collection_latest(self, uuid: str) -> Collection: - data = self._http.get_json(f"/componentRelease/{uuid}/collection/latest") + data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}/collection/latest") return _validate(Collection, data) def get_component_release_collections(self, uuid: str) -> list[Collection]: - data = self._http.get_json(f"/componentRelease/{uuid}/collections") + data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}/collections") return _validate_list(Collection, data) def get_component_release_collection(self, uuid: str, version: int) -> Collection: - data = self._http.get_json(f"/componentRelease/{uuid}/collection/{version}") + data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}/collection/{version}") return _validate(Collection, data) # --- Artifacts --- def get_artifact(self, uuid: str) -> Artifact: 
- data = self._http.get_json(f"/artifact/{uuid}") + data = self._http.get_json(f"/artifact/{_validate_path_segment(uuid)}") return _validate(Artifact, data) def download_artifact( @@ -171,35 +190,45 @@ def download_artifact( verify_checksums: list[Checksum] | None = None, ) -> Path: """Download an artifact file, optionally verifying checksums.""" - from libtea.exceptions import TeaChecksumError - algorithms = [cs.alg_type.value for cs in verify_checksums] if verify_checksums else None computed = self._http.download_with_hashes(url, dest, algorithms=algorithms) if verify_checksums: - for cs in verify_checksums: - alg_name = cs.alg_type.value - expected = cs.alg_value.lower() - if alg_name not in computed: - dest.unlink(missing_ok=True) - raise TeaChecksumError( - f"No computed digest for algorithm: {alg_name}", - algorithm=alg_name, - expected=expected, - actual=None, - ) - actual = computed[alg_name].lower() - if actual != expected: - dest.unlink(missing_ok=True) - raise TeaChecksumError( - f"{alg_name} mismatch: expected {expected}, got {actual}", - algorithm=alg_name, - expected=expected, - actual=actual, - ) + self._verify_checksums(verify_checksums, computed, url, dest) return dest + @staticmethod + def _verify_checksums(checksums: list[Checksum], computed: dict[str, str], url: str, dest: Path) -> None: + """Verify computed checksums against expected values, cleaning up on failure.""" + for cs in checksums: + alg_name = cs.alg_type.value + expected = cs.alg_value.lower() + if alg_name not in computed: + dest.unlink(missing_ok=True) + raise TeaChecksumError( + f"No computed digest for algorithm: {alg_name}", + algorithm=alg_name, + expected=expected, + actual=None, + ) + actual = computed[alg_name].lower() + if not hmac.compare_digest(actual, expected): + dest.unlink(missing_ok=True) + logger.error( + "Checksum mismatch for %s: algorithm=%s expected=%s actual=%s", + url, + alg_name, + expected, + actual, + ) + raise TeaChecksumError( + f"{alg_name} mismatch: 
expected {expected}, got {actual}", + algorithm=alg_name, + expected=expected, + actual=actual, + ) + # --- Lifecycle --- def close(self) -> None: diff --git a/libtea/discovery.py b/libtea/discovery.py index 414bd10..f67825f 100644 --- a/libtea/discovery.py +++ b/libtea/discovery.py @@ -1,12 +1,99 @@ """TEI parsing, .well-known/tea fetching, and endpoint selection.""" +import logging import re +from functools import total_ordering import requests +from pydantic import ValidationError +from libtea._http import USER_AGENT from libtea.exceptions import TeaDiscoveryError from libtea.models import TeaEndpoint, TeaWellKnown +_SEMVER_RE = re.compile(r"^(?P\d+)\.(?P\d+)(?:\.(?P\d+))?(?:-(?P
<pre>[0-9A-Za-z.-]+))?$")
+
+
+@total_ordering
+class _SemVer:
+    """Minimal SemVer 2.0.0 parser for version precedence comparison.
+
+    Implements comparison per https://semver.org/#spec-item-11:
+    - MAJOR.MINOR.PATCH compared numerically left-to-right
+    - Pre-release versions have lower precedence than the normal version
+    - Pre-release identifiers: numeric < alphanumeric, numeric compared as ints,
+      alphanumeric compared lexically; shorter tuple has lower precedence
+    """
+
+    __slots__ = ("major", "minor", "patch", "pre", "_raw")
+
+    def __init__(self, version_str: str) -> None:
+        m = _SEMVER_RE.match(version_str)
+        if not m:
+            raise ValueError(f"Invalid SemVer string: {version_str!r}")
+        self._raw = version_str
+        self.major = int(m["major"])
+        self.minor = int(m["minor"])
+        self.patch = int(m["patch"]) if m["patch"] is not None else 0
+        self.pre: tuple[int | str, ...] = tuple(_SemVer._parse_pre(m["pre"])) if m["pre"] else ()
+
+    @staticmethod
+    def _parse_pre(pre_str: str) -> list[int | str]:
+        parts: list[int | str] = []
+        for part in pre_str.split("."):
+            parts.append(int(part) if part.isdigit() else part)
+        return parts
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _SemVer):
+            return NotImplemented
+        return (self.major, self.minor, self.patch, self.pre) == (other.major, other.minor, other.patch, other.pre)
+
+    def __hash__(self) -> int:
+        return hash((self.major, self.minor, self.patch, self.pre))
+
+    def __lt__(self, other: object) -> bool:
+        if not isinstance(other, _SemVer):
+            return NotImplemented
+        if (self.major, self.minor, self.patch) != (other.major, other.minor, other.patch):
+            return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
+        # Pre-release has lower precedence than no pre-release
+        if self.pre and not other.pre:
+            return True
+        if not self.pre and other.pre:
+            return False
+        if not self.pre and not other.pre:
+            return False
+        # Compare pre-release identifiers per SemVer spec item 11.4
+        return _SemVer._compare_pre(self.pre, other.pre) < 0
+
+    @staticmethod
+    def _compare_pre(a: tuple[int | str, ...], b: tuple[int | str, ...]) -> int:
+        for ai, bi in zip(a, b):
+            if type(ai) is type(bi):
+                if ai < bi:  # type: ignore[operator]
+                    return -1
+                if ai > bi:  # type: ignore[operator]
+                    return 1
+            else:
+                # Numeric identifiers always have lower precedence than alphanumeric
+                return -1 if isinstance(ai, int) else 1
+        # Shorter set has lower precedence
+        if len(a) < len(b):
+            return -1
+        if len(a) > len(b):
+            return 1
+        return 0
+
+    def __repr__(self) -> str:
+        return f"_SemVer({self._raw!r})"
+
+    def __str__(self) -> str:
+        return self._raw
+
+
# Shared library logger; handlers/levels are left to the application.
logger = logging.getLogger("libtea")

# Identifier types accepted when parsing TEI URNs (used by parse_tei).
_VALID_TEI_TYPES = frozenset({"uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"})
# Hostname shape: dot-separated labels of alphanumerics/hyphens, no leading or
# trailing hyphen per label, label length capped (2 anchors + up to 61 interior chars).
_DOMAIN_RE = re.compile(
    r"^[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?)*$"
)
@@ -37,19 +124,27 @@ def parse_tei(tei: str) -> tuple[str, str, str]:
 
 def fetch_well_known(domain: str, *, timeout: float = 10.0) -> TeaWellKnown:
     """Fetch and parse the .well-known/tea document from a domain via HTTPS."""
-    from libtea._http import USER_AGENT
-
+    if not domain or not _DOMAIN_RE.match(domain):
+        raise TeaDiscoveryError(f"Invalid domain: {domain!r}")
     url = f"https://{domain}/.well-known/tea"
     try:
         response = requests.get(url, timeout=timeout, allow_redirects=True, headers={"user-agent": USER_AGENT})
+        if 300 <= response.status_code < 400:
+            raise TeaDiscoveryError(f"Unexpected redirect from {url}: HTTP {response.status_code}")
         if response.status_code >= 400:
-            raise TeaDiscoveryError(f"Failed to fetch {url}: HTTP {response.status_code}")
+            body_snippet = response.text[:200] if response.text else ""
+            msg = f"Failed to fetch {url}: HTTP {response.status_code}"
+            if body_snippet:
+                msg = f"{msg} — {body_snippet}"
+            raise TeaDiscoveryError(msg)
     except requests.ConnectionError as exc:
+        logger.warning("Discovery connection error for %s: %s", url, exc)
         raise TeaDiscoveryError(f"Failed to connect to {url}: {exc}") from exc
     except requests.Timeout as exc:
+        logger.warning("Discovery timeout for %s: %s", url, exc)
         raise TeaDiscoveryError(f"Failed to connect to {url}: {exc}") from exc
-    except TeaDiscoveryError:
-        raise
+    except requests.RequestException as exc:
+        raise TeaDiscoveryError(f"HTTP error fetching {url}: {exc}") from exc
 
     try:
         data = response.json()
@@ -58,16 +153,32 @@ def fetch_well_known(domain: str, *, timeout: float = 10.0) -> TeaWellKnown:
 
     try:
         return TeaWellKnown.model_validate(data)
-    except Exception as exc:
+    except ValidationError as exc:
         raise TeaDiscoveryError(f"Invalid .well-known/tea document from {domain}: {exc}") from exc
 
 
 def select_endpoint(well_known: TeaWellKnown, supported_version: str) -> TeaEndpoint:
     """Select the best endpoint that supports the given version.
 
-    Prefers endpoints with the requested version, then by highest priority.
+    Per TEA spec: uses SemVer 2.0.0 comparison to match versions, then
+    prioritizes by highest matching version, with priority as tiebreaker.
     """
-    candidates = [ep for ep in well_known.endpoints if supported_version in ep.versions]
+    target = _SemVer(supported_version)
+
+    # For each endpoint, find the highest version matching the target via SemVer equality.
+    # This handles cases like "1.0" matching "1.0.0" (patch defaults to 0).
+    candidates: list[tuple[_SemVer, TeaEndpoint]] = []
+    for ep in well_known.endpoints:
+        best_match: _SemVer | None = None
+        for v_str in ep.versions:
+            try:
+                v = _SemVer(v_str)
+            except ValueError:
+                continue
+            if v == target and (best_match is None or v > best_match):
+                best_match = v
+        if best_match is not None:
+            candidates.append((best_match, ep))
 
     if not candidates:
         available = {v for ep in well_known.endpoints for v in ep.versions}
@@ -75,5 +186,9 @@ def select_endpoint(well_known: TeaWellKnown, supported_version: str) -> TeaEndp
             f"No compatible endpoint found for version {supported_version!r}. Available versions: {sorted(available)}"
         )
 
-    candidates.sort(key=lambda ep: ep.priority if ep.priority is not None else 1.0, reverse=True)
-    return candidates[0]
+    # Sort by: highest SemVer version desc, then priority desc (default 1.0 per spec)
+    candidates.sort(
+        key=lambda pair: (pair[0], pair[1].priority if pair[1].priority is not None else 1.0),
+        reverse=True,
+    )
+    return candidates[0][1]
diff --git a/libtea/exceptions.py b/libtea/exceptions.py
index 1342660..626fd3c 100644
--- a/libtea/exceptions.py
+++ b/libtea/exceptions.py
@@ -22,7 +22,7 @@ def __init__(self, message: str, *, error_type: str | None = None):
 
 
class TeaRequestError(TeaError):
    """Unexpected HTTP redirect (3xx) or client error (4xx other than 401/403/404).

    When the response has a body, up to 200 characters of it are appended to
    the exception message to aid debugging.
    """
 
 
 class TeaServerError(TeaError):
@@ -52,3 +52,7 @@ def __init__(
 
class TeaValidationError(TeaError):
    """Malformed server response that fails Pydantic validation.

    Also raised for invalid client-supplied values: URL path segments that
    fail the safe-character check and artifact download URLs that do not use
    an http(s) scheme or lack a hostname.
    """
+
+
class TeaInsecureTransportWarning(UserWarning):
    """Warning emitted when using plaintext HTTP instead of HTTPS.

    Emitted via warnings.warn when the HTTP client is constructed with an
    http:// base URL. Subclasses UserWarning (not TeaError) because it flags
    a risky-but-permitted configuration rather than a failure.
    """
diff --git a/libtea/models.py b/libtea/models.py
index 3c789b4..05b0ca6 100644
--- a/libtea/models.py
+++ b/libtea/models.py
@@ -4,7 +4,7 @@
 from enum import StrEnum
 from typing import Literal
 
-from pydantic import BaseModel, ConfigDict, field_validator
+from pydantic import BaseModel, ConfigDict, Field, field_validator
 from pydantic.alias_generators import to_camel
 
 
@@ -14,6 +14,8 @@ class _TeaModel(BaseModel):
     model_config = ConfigDict(
         alias_generator=to_camel,
         populate_by_name=True,
+        extra="ignore",
+        frozen=True,
     )
 
 
@@ -42,6 +44,10 @@ class ChecksumAlgorithm(StrEnum):
     BLAKE3 = "BLAKE3"
 
 
+_CHECKSUM_VALUES = frozenset(e.value for e in ChecksumAlgorithm)
+_CHECKSUM_NAME_TO_VALUE = {e.name: e.value for e in ChecksumAlgorithm}
+
+
 class ArtifactType(StrEnum):
     ATTESTATION = "ATTESTATION"
     BOM = "BOM"
@@ -94,8 +100,8 @@ def normalize_alg_type(cls, v: str) -> str:
         Uses member-name lookup instead of blind replace to handle
         BLAKE2b casing correctly (BLAKE2B_256 -> BLAKE2b-256).
         """
-        if isinstance(v, str) and v not in {e.value for e in ChecksumAlgorithm}:
-            mapped = {e.name: e.value for e in ChecksumAlgorithm}.get(v)
+        if isinstance(v, str) and v not in _CHECKSUM_VALUES:
+            mapped = _CHECKSUM_NAME_TO_VALUE.get(v)
             if mapped is not None:
                 return mapped
         return v
@@ -168,7 +174,7 @@ class Release(_TeaModel):
 
class ComponentReleaseWithCollection(_TeaModel):
    """A component release paired with its latest artifact collection.

    NOTE(review): latest_collection is required here (previously it was
    Optional with a None default), so responses omitting it now fail
    validation — confirm the TEA spec guarantees its presence.
    """

    release: Release
    latest_collection: Collection
 
 
 class Product(_TeaModel):
@@ -217,19 +223,19 @@ class PaginatedProductReleaseResponse(_TeaModel):
 
class TeaEndpoint(_TeaModel):
    """One API endpoint entry from a .well-known/tea discovery document."""

    url: str
    # Must advertise at least one supported TEA spec version.
    versions: list[str] = Field(min_length=1)
    # Endpoint-selection tiebreaker; None is treated as the default by consumers.
    # NOTE(review): the ge=0/le=1 bounds assume priority is normalized to
    # [0, 1] — confirm against the TEA specification.
    priority: float | None = Field(default=None, ge=0, le=1)
 
 
class TeaWellKnown(_TeaModel):
    """Parsed .well-known/tea discovery document."""

    # Only schema version 1 is accepted; any other value fails validation.
    schema_version: Literal[1]
    # A valid document must list at least one endpoint.
    endpoints: list[TeaEndpoint] = Field(min_length=1)
 
 
class TeaServerInfo(_TeaModel):
    """Server-advertised TEA endpoint information."""

    root_url: str
    # Must advertise at least one supported TEA spec version.
    versions: list[str] = Field(min_length=1)
    # Selection priority; None means unspecified.
    # NOTE(review): the ge=0/le=1 bounds assume priority is normalized to
    # [0, 1] — confirm against the TEA specification.
    priority: float | None = Field(default=None, ge=0, le=1)
 
 
 class DiscoveryInfo(_TeaModel):
diff --git a/pyproject.toml b/pyproject.toml
index 55e9256..f1a3f4c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -50,7 +50,7 @@ build-backend = "hatchling.build"
 [tool.pytest.ini_options]
 testpaths = ["tests"]
 python_files = ["test_*.py"]
-addopts = "--cov=libtea --cov-report=term-missing"
+addopts = "--cov=libtea --cov-report=term-missing --cov-branch"
 
 [tool.ruff]
 line-length = 120
diff --git a/tests/test_client.py b/tests/test_client.py
index 0bdc9af..b01691d 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -1,7 +1,8 @@
 import pytest
 import responses
 
-from libtea.client import TeaClient
+from libtea.client import TeaClient, _validate_path_segment
+from libtea.exceptions import TeaDiscoveryError, TeaValidationError
 from libtea.models import (
     Artifact,
     Collection,
@@ -13,14 +14,13 @@
     ProductRelease,
     Release,
 )
-from tests.conftest import BASE_URL as BASE
 
 
 class TestSearchProducts:
     @responses.activate
-    def test_search_products_by_purl(self, client):
+    def test_search_products_by_purl(self, client, base_url):
         responses.get(
-            f"{BASE}/products",
+            f"{base_url}/products",
             json={
                 "timestamp": "2024-03-20T15:30:00Z",
                 "pageStartIndex": 0,
@@ -44,9 +44,9 @@ def test_search_products_by_purl(self, client):
         assert "idValue=pkg" in str(request.url)
 
     @responses.activate
-    def test_search_products_pagination(self, client):
+    def test_search_products_pagination(self, client, base_url):
         responses.get(
-            f"{BASE}/products",
+            f"{base_url}/products",
             json={
                 "timestamp": "2024-03-20T15:30:00Z",
                 "pageStartIndex": 10,
@@ -62,9 +62,9 @@ def test_search_products_pagination(self, client):
         assert resp.page_start_index == 10
 
     @responses.activate
-    def test_search_products_empty(self, client):
+    def test_search_products_empty(self, client, base_url):
         responses.get(
-            f"{BASE}/products",
+            f"{base_url}/products",
             json={
                 "timestamp": "2024-03-20T15:30:00Z",
                 "pageStartIndex": 0,
@@ -80,9 +80,9 @@ def test_search_products_empty(self, client):
 
 class TestSearchProductReleases:
     @responses.activate
-    def test_search_product_releases_by_purl(self, client):
+    def test_search_product_releases_by_purl(self, client, base_url):
         responses.get(
-            f"{BASE}/productReleases",
+            f"{base_url}/productReleases",
             json={
                 "timestamp": "2024-03-20T15:30:00Z",
                 "pageStartIndex": 0,
@@ -108,9 +108,9 @@ def test_search_product_releases_by_purl(self, client):
 
 class TestProduct:
     @responses.activate
-    def test_get_product(self, client):
+    def test_get_product(self, client, base_url):
         responses.get(
-            f"{BASE}/product/abc-123",
+            f"{base_url}/product/abc-123",
             json={
                 "uuid": "abc-123",
                 "name": "Test Product",
@@ -122,9 +122,9 @@ def test_get_product(self, client):
         assert product.name == "Test Product"
 
     @responses.activate
-    def test_get_product_releases(self, client):
+    def test_get_product_releases(self, client, base_url):
         responses.get(
-            f"{BASE}/product/abc-123/releases",
+            f"{base_url}/product/abc-123/releases",
             json={
                 "timestamp": "2024-03-20T15:30:00Z",
                 "pageStartIndex": 0,
@@ -147,9 +147,9 @@ def test_get_product_releases(self, client):
 
 class TestProductRelease:
     @responses.activate
-    def test_get_product_release(self, client):
+    def test_get_product_release(self, client, base_url):
         responses.get(
-            f"{BASE}/productRelease/rel-1",
+            f"{base_url}/productRelease/rel-1",
             json={
                 "uuid": "rel-1",
                 "version": "1.0.0",
@@ -162,9 +162,9 @@ def test_get_product_release(self, client):
         assert release.version == "1.0.0"
 
     @responses.activate
-    def test_get_product_release_collection_latest(self, client):
+    def test_get_product_release_collection_latest(self, client, base_url):
         responses.get(
-            f"{BASE}/productRelease/rel-1/collection/latest",
+            f"{base_url}/productRelease/rel-1/collection/latest",
             json={
                 "uuid": "rel-1",
                 "version": 1,
@@ -177,9 +177,9 @@ def test_get_product_release_collection_latest(self, client):
 
 class TestComponent:
     @responses.activate
-    def test_get_component(self, client):
+    def test_get_component(self, client, base_url):
         responses.get(
-            f"{BASE}/component/comp-1",
+            f"{base_url}/component/comp-1",
             json={
                 "uuid": "comp-1",
                 "name": "Test Component",
@@ -191,9 +191,9 @@ def test_get_component(self, client):
         assert component.name == "Test Component"
 
     @responses.activate
-    def test_get_component_releases(self, client):
+    def test_get_component_releases(self, client, base_url):
         responses.get(
-            f"{BASE}/component/comp-1/releases",
+            f"{base_url}/component/comp-1/releases",
             json=[
                 {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"},
             ],
@@ -205,9 +205,9 @@ def test_get_component_releases(self, client):
 
 class TestComponentRelease:
     @responses.activate
-    def test_get_component_release(self, client):
+    def test_get_component_release(self, client, base_url):
         responses.get(
-            f"{BASE}/componentRelease/cr-1",
+            f"{base_url}/componentRelease/cr-1",
             json={
                 "release": {"uuid": "cr-1", "version": "1.0.0", "createdDate": "2024-01-01T00:00:00Z"},
                 "latestCollection": {"uuid": "cr-1", "version": 1, "artifacts": []},
@@ -219,23 +219,21 @@ def test_get_component_release(self, client):
         assert result.latest_collection is not None
 
     @responses.activate
-    def test_get_component_release_without_collection(self, client):
+    def test_get_component_release_missing_collection_raises(self, client, base_url):
+        """Per the TEA spec, latestCollection is required — missing it should raise."""
         responses.get(
-            f"{BASE}/componentRelease/cr-2",
+            f"{base_url}/componentRelease/cr-2",
             json={
                 "release": {"uuid": "cr-2", "version": "2.0.0", "createdDate": "2024-01-01T00:00:00Z"},
-                "latestCollection": None,
             },
         )
-        result = client.get_component_release("cr-2")
-        assert isinstance(result, ComponentReleaseWithCollection)
-        assert result.release.version == "2.0.0"
-        assert result.latest_collection is None
+        with pytest.raises(TeaValidationError, match="Invalid ComponentReleaseWithCollection"):
+            client.get_component_release("cr-2")
 
     @responses.activate
-    def test_get_component_release_collection_latest(self, client):
+    def test_get_component_release_collection_latest(self, client, base_url):
         responses.get(
-            f"{BASE}/componentRelease/cr-1/collection/latest",
+            f"{base_url}/componentRelease/cr-1/collection/latest",
             json={"uuid": "cr-1", "version": 2, "artifacts": []},
         )
         collection = client.get_component_release_collection_latest("cr-1")
@@ -243,9 +241,9 @@ def test_get_component_release_collection_latest(self, client):
         assert collection.version == 2
 
     @responses.activate
-    def test_get_component_release_collections(self, client):
+    def test_get_component_release_collections(self, client, base_url):
         responses.get(
-            f"{BASE}/componentRelease/cr-1/collections",
+            f"{base_url}/componentRelease/cr-1/collections",
             json=[
                 {"uuid": "cr-1", "version": 1, "artifacts": []},
                 {"uuid": "cr-1", "version": 2, "artifacts": []},
@@ -255,9 +253,9 @@ def test_get_component_release_collections(self, client):
         assert len(collections) == 2
 
     @responses.activate
-    def test_get_component_release_collection_by_version(self, client):
+    def test_get_component_release_collection_by_version(self, client, base_url):
         responses.get(
-            f"{BASE}/componentRelease/cr-1/collection/3",
+            f"{base_url}/componentRelease/cr-1/collection/3",
             json={"uuid": "cr-1", "version": 3, "artifacts": []},
         )
         collection = client.get_component_release_collection("cr-1", 3)
@@ -266,9 +264,9 @@ def test_get_component_release_collection_by_version(self, client):
 
 class TestArtifact:
     @responses.activate
-    def test_get_artifact(self, client):
+    def test_get_artifact(self, client, base_url):
         responses.get(
-            f"{BASE}/artifact/art-1",
+            f"{base_url}/artifact/art-1",
             json={
                 "uuid": "art-1",
                 "name": "SBOM",
@@ -289,10 +287,10 @@ def test_get_artifact(self, client):
 
 class TestDiscovery:
     @responses.activate
-    def test_discover(self, client):
+    def test_discover(self, client, base_url):
         tei = "urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b"
         responses.get(
-            f"{BASE}/discovery",
+            f"{base_url}/discovery",
             json=[
                 {
                     "productReleaseUuid": "d4d9f54a-abcf-11ee-ac79-1a52914d44b",
@@ -308,8 +306,8 @@ def test_discover(self, client):
         assert "tei=" in str(request.url)
 
     @responses.activate
-    def test_discover_empty_result(self, client):
-        responses.get(f"{BASE}/discovery", json=[])
+    def test_discover_empty_result(self, client, base_url):
+        responses.get(f"{base_url}/discovery", json=[])
         results = client.discover("urn:tei:uuid:example.com:d4d9f54a")
         assert results == []
 
@@ -330,8 +328,6 @@ def test_from_well_known_creates_client(self):
 
     @responses.activate
     def test_from_well_known_no_compatible_version_raises(self):
-        from libtea.exceptions import TeaDiscoveryError
-
         responses.get(
             "https://example.com/.well-known/tea",
             json={
@@ -339,11 +335,11 @@ def test_from_well_known_no_compatible_version_raises(self):
                 "endpoints": [{"url": "https://api.example.com", "versions": ["99.0.0"]}],
             },
         )
-        with pytest.raises(TeaDiscoveryError):
+        with pytest.raises(TeaDiscoveryError, match="No compatible endpoint"):
             TeaClient.from_well_known("example.com")
 
     @responses.activate
-    def test_from_well_known_passes_token(self):
+    def test_from_well_known_passes_token(self, base_url):
         responses.get(
             "https://example.com/.well-known/tea",
             json={
@@ -363,9 +359,9 @@ def test_from_well_known_passes_token(self):
 
 class TestPagination:
     @responses.activate
-    def test_get_product_releases_pagination_params(self, client):
+    def test_get_product_releases_pagination_params(self, client, base_url):
         responses.get(
-            f"{BASE}/product/abc-123/releases",
+            f"{base_url}/product/abc-123/releases",
             json={
                 "timestamp": "2024-03-20T15:30:00Z",
                 "pageStartIndex": 50,
@@ -383,9 +379,9 @@ def test_get_product_releases_pagination_params(self, client):
 
 class TestProductReleaseCollections:
     @responses.activate
-    def test_get_product_release_collections(self, client):
+    def test_get_product_release_collections(self, client, base_url):
         responses.get(
-            f"{BASE}/productRelease/rel-1/collections",
+            f"{base_url}/productRelease/rel-1/collections",
             json=[
                 {"uuid": "rel-1", "version": 1, "artifacts": []},
                 {"uuid": "rel-1", "version": 2, "artifacts": []},
@@ -396,9 +392,9 @@ def test_get_product_release_collections(self, client):
         assert collections[0].version == 1
 
     @responses.activate
-    def test_get_product_release_collection_by_version(self, client):
+    def test_get_product_release_collection_by_version(self, client, base_url):
         responses.get(
-            f"{BASE}/productRelease/rel-1/collection/5",
+            f"{base_url}/productRelease/rel-1/collection/5",
             json={"uuid": "rel-1", "version": 5, "artifacts": []},
         )
         collection = client.get_product_release_collection("rel-1", 5)
@@ -407,34 +403,61 @@ def test_get_product_release_collection_by_version(self, client):
 
 class TestValidationErrors:
     @responses.activate
-    def test_validate_raises_tea_validation_error(self, client):
-        from libtea.exceptions import TeaValidationError
-
+    def test_validate_raises_tea_validation_error(self, client, base_url):
+        # Missing required fields trigger Pydantic ValidationError → TeaValidationError
-        responses.get(f"{BASE}/product/abc", json={"bad": "data"})
+        responses.get(f"{base_url}/product/abc", json={"bad": "data"})
         with pytest.raises(TeaValidationError, match="Invalid Product response"):
             client.get_product("abc")
 
     @responses.activate
-    def test_validate_list_raises_tea_validation_error(self, client):
-        from libtea.exceptions import TeaValidationError
-
+    def test_validate_list_raises_tea_validation_error(self, client, base_url):
         # List with invalid items triggers Pydantic ValidationError → TeaValidationError
         responses.get(
-            f"{BASE}/component/comp-1/releases",
+            f"{base_url}/component/comp-1/releases",
             json=[{"bad": "data"}],
         )
         with pytest.raises(TeaValidationError, match="Invalid Release response"):
             client.get_component_releases("comp-1")
 
 
+class TestValidatePathSegment:
+    def test_accepts_uuid(self):
+        assert _validate_path_segment("d4d9f54a-abcf-11ee-ac79-1a52914d44b1") == "d4d9f54a-abcf-11ee-ac79-1a52914d44b1"
+
+    def test_accepts_alphanumeric(self):
+        assert _validate_path_segment("abc123") == "abc123"
+
+    @pytest.mark.parametrize(
+        "value",
+        [
+            "../../etc/passwd",
+            "abc/def",
+            "abc def",
+            "abc?query=1",
+            "abc#fragment",
+            "abc@host",
+            "abc.def",
+            "",
+            "a" * 129,
+            "abc\x00def",
+        ],
+    )
+    def test_rejects_unsafe_values(self, value):
+        with pytest.raises(TeaValidationError, match="Invalid uuid"):
+            _validate_path_segment(value)
+
+    def test_error_message_includes_guidance(self):
+        with pytest.raises(TeaValidationError, match="alphanumeric characters and hyphens"):
+            _validate_path_segment("../traversal")
+
+
 class TestContextManager:
     @responses.activate
-    def test_client_as_context_manager(self):
+    def test_client_as_context_manager(self, base_url):
         responses.get(
-            f"{BASE}/component/c1",
+            f"{base_url}/component/c1",
             json={"uuid": "c1", "name": "C1", "identifiers": []},
         )
-        with TeaClient(base_url=BASE) as client:
+        with TeaClient(base_url=base_url) as client:
             component = client.get_component("c1")
             assert component.name == "C1"
diff --git a/tests/test_discovery.py b/tests/test_discovery.py
index cd51f2a..fc69933 100644
--- a/tests/test_discovery.py
+++ b/tests/test_discovery.py
@@ -1,8 +1,9 @@
 import pytest
 import requests
 import responses
+from pydantic import ValidationError
 
-from libtea.discovery import fetch_well_known, parse_tei, select_endpoint
+from libtea.discovery import _SemVer, fetch_well_known, parse_tei, select_endpoint
 from libtea.exceptions import TeaDiscoveryError
 from libtea.models import TeaEndpoint, TeaWellKnown
 
@@ -49,9 +50,12 @@ def test_invalid_tei_unknown_type(self):
         with pytest.raises(TeaDiscoveryError, match="Invalid TEI type"):
             parse_tei("urn:tei:unknown:example.com:some-id")
 
-    def test_all_valid_tei_types(self):
-        for tei_type in ("uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"):
-            _, _, _ = parse_tei(f"urn:tei:{tei_type}:example.com:some-id")
+    @pytest.mark.parametrize("tei_type", ["uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"])
+    def test_all_valid_tei_types(self, tei_type):
+        result_type, domain, identifier = parse_tei(f"urn:tei:{tei_type}:example.com:some-id")
+        assert result_type == tei_type
+        assert domain == "example.com"
+        assert identifier == "some-id"
 
     def test_invalid_tei_empty_domain(self):
         with pytest.raises(TeaDiscoveryError, match="Invalid domain"):
@@ -133,6 +137,24 @@ def test_fetch_well_known_500_raises_discovery_error(self):
         with pytest.raises(TeaDiscoveryError):
             fetch_well_known("example.com")
 
+    def test_fetch_well_known_empty_domain_raises(self):
+        with pytest.raises(TeaDiscoveryError, match="Invalid domain"):
+            fetch_well_known("")
+
+    def test_fetch_well_known_invalid_domain_raises(self):
+        with pytest.raises(TeaDiscoveryError, match="Invalid domain"):
+            fetch_well_known("-bad.com")
+
+    def test_fetch_well_known_underscore_domain_raises(self):
+        with pytest.raises(TeaDiscoveryError, match="Invalid domain"):
+            fetch_well_known("bad_domain.com")
+
+    @responses.activate
+    def test_fetch_well_known_request_exception(self):
+        responses.get("https://example.com/.well-known/tea", body=requests.exceptions.TooManyRedirects("too many"))
+        with pytest.raises(TeaDiscoveryError, match="HTTP error"):
+            fetch_well_known("example.com")
+
     @responses.activate
     def test_fetch_well_known_non_json_raises_discovery_error(self):
         responses.get("https://example.com/.well-known/tea", body="not json")
@@ -191,17 +213,134 @@ def test_prefers_highest_matching_version(self):
         ep = select_endpoint(wk, "1.0.0")
         assert ep.url == "https://new.example.com"
 
-    def test_empty_endpoints_raises(self):
-        wk = TeaWellKnown(schema_version=1, endpoints=[])
+    def test_empty_endpoints_rejected_by_model(self):
+        """TeaWellKnown enforces min_length=1 on endpoints per spec."""
+        with pytest.raises(ValidationError):
+            TeaWellKnown(schema_version=1, endpoints=[])
+
+    def test_none_priority_defaults_to_1(self):
+        """Endpoint without priority defaults to 1.0 (highest), matching spec default."""
+        wk = self._make_well_known(
+            [
+                {"url": "https://none-priority.example.com", "versions": ["1.0.0"]},
+                {"url": "https://low-priority.example.com", "versions": ["1.0.0"], "priority": 0.5},
+            ]
+        )
+        ep = select_endpoint(wk, "1.0.0")
+        assert ep.url == "https://none-priority.example.com"
+
+    def test_semver_matches_without_patch(self):
+        """Version '1.0' in the endpoint should match client version '1.0.0'."""
+        wk = self._make_well_known(
+            [
+                {"url": "https://api.example.com", "versions": ["1.0"]},
+            ]
+        )
+        ep = select_endpoint(wk, "1.0.0")
+        assert ep.url == "https://api.example.com"
+
+    def test_semver_matches_with_prerelease(self):
+        """Pre-release versions match exactly."""
+        wk = self._make_well_known(
+            [
+                {"url": "https://api.example.com", "versions": ["0.3.0-beta.2"]},
+            ]
+        )
+        ep = select_endpoint(wk, "0.3.0-beta.2")
+        assert ep.url == "https://api.example.com"
+
+    def test_semver_prerelease_does_not_match_release(self):
+        """Pre-release '1.0.0-beta.1' should not match '1.0.0'."""
+        wk = self._make_well_known(
+            [
+                {"url": "https://api.example.com", "versions": ["1.0.0-beta.1"]},
+            ]
+        )
         with pytest.raises(TeaDiscoveryError, match="No compatible endpoint"):
             select_endpoint(wk, "1.0.0")
 
-    def test_none_priority_vs_explicit_priority(self):
+    def test_invalid_semver_in_endpoint_skipped(self):
+        """Invalid version strings in an endpoint are silently skipped."""
         wk = self._make_well_known(
             [
-                {"url": "https://none-priority.example.com", "versions": ["1.0.0"]},
-                {"url": "https://high-priority.example.com", "versions": ["1.0.0"], "priority": 2.0},
+                {"url": "https://api.example.com", "versions": ["not-semver", "1.0.0"]},
             ]
         )
         ep = select_endpoint(wk, "1.0.0")
-        assert ep.url == "https://high-priority.example.com"
+        assert ep.url == "https://api.example.com"
+
+    def test_priority_out_of_range_rejected(self):
+        """Priority > 1.0 should be rejected by model validation."""
+        with pytest.raises(ValidationError):
+            TeaEndpoint(url="https://api.example.com", versions=["1.0.0"], priority=2.0)
+
+    def test_empty_versions_rejected(self):
+        """Endpoint with empty versions list should be rejected by model validation."""
+        with pytest.raises(ValidationError):
+            TeaEndpoint(url="https://api.example.com", versions=[])
+
+
+class TestSemVer:
+    def test_parse_basic(self):
+        v = _SemVer("1.2.3")
+        assert v.major == 1
+        assert v.minor == 2
+        assert v.patch == 3
+        assert v.pre == ()
+
+    def test_parse_without_patch(self):
+        v = _SemVer("1.0")
+        assert v.major == 1
+        assert v.minor == 0
+        assert v.patch == 0
+
+    def test_parse_with_prerelease(self):
+        v = _SemVer("0.3.0-beta.2")
+        assert v.major == 0
+        assert v.minor == 3
+        assert v.patch == 0
+        assert v.pre == ("beta", 2)
+
+    def test_equality_with_and_without_patch(self):
+        assert _SemVer("1.0") == _SemVer("1.0.0")
+
+    def test_ordering_major(self):
+        assert _SemVer("1.0.0") < _SemVer("2.0.0")
+
+    def test_ordering_minor(self):
+        assert _SemVer("1.0.0") < _SemVer("1.1.0")
+
+    def test_ordering_patch(self):
+        assert _SemVer("1.0.0") < _SemVer("1.0.1")
+
+    def test_prerelease_lower_than_release(self):
+        assert _SemVer("1.0.0-alpha") < _SemVer("1.0.0")
+
+    def test_prerelease_ordering(self):
+        """SemVer spec example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0"""
+        versions = [
+            "1.0.0-alpha",
+            "1.0.0-alpha.1",
+            "1.0.0-alpha.beta",
+            "1.0.0-beta",
+            "1.0.0-beta.2",
+            "1.0.0-beta.11",
+            "1.0.0-rc.1",
+            "1.0.0",
+        ]
+        parsed = [_SemVer(v) for v in versions]
+        for i in range(len(parsed) - 1):
+            assert parsed[i] < parsed[i + 1], f"{versions[i]} should be < {versions[i + 1]}"
+
+    def test_numeric_prerelease_less_than_alpha(self):
+        """Numeric identifiers have lower precedence than alphanumeric."""
+        assert _SemVer("1.0.0-1") < _SemVer("1.0.0-alpha")
+
+    def test_invalid_semver_raises(self):
+        with pytest.raises(ValueError, match="Invalid SemVer"):
+            _SemVer("not-a-version")
+
+    def test_str_repr(self):
+        v = _SemVer("1.2.3-beta.1")
+        assert str(v) == "1.2.3-beta.1"
+        assert repr(v) == "_SemVer('1.2.3-beta.1')"
diff --git a/tests/test_download.py b/tests/test_download.py
index 4d08d9a..222eb54 100644
--- a/tests/test_download.py
+++ b/tests/test_download.py
@@ -34,9 +34,12 @@ def test_download_with_invalid_checksum_deletes_file(self, client, tmp_path):
         responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT)
         checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="badhash")]
         dest = tmp_path / "sbom.json"
-        with pytest.raises(TeaChecksumError, match="SHA-256"):
+        with pytest.raises(TeaChecksumError, match="SHA-256") as exc_info:
             client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
         assert not dest.exists()
+        assert exc_info.value.algorithm == "SHA-256"
+        assert exc_info.value.expected == "badhash"
+        assert exc_info.value.actual is not None
 
     @responses.activate
     def test_download_with_multiple_checksums(self, client, tmp_path):
@@ -63,8 +66,9 @@ def test_download_checksum_uppercase_hex_accepted(self, client, tmp_path):
     def test_download_with_blake3_raises_clear_error(self, client, tmp_path):
         checksums = [Checksum(alg_type=ChecksumAlgorithm.BLAKE3, alg_value="somevalue")]
         dest = tmp_path / "sbom.json"
-        with pytest.raises(TeaChecksumError, match="BLAKE3"):
+        with pytest.raises(TeaChecksumError, match="BLAKE3") as exc_info:
             client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
+        assert exc_info.value.algorithm == "BLAKE3"
 
     @responses.activate
     def test_download_with_unknown_algorithm_raises_clear_error(self, client, tmp_path):
@@ -77,6 +81,30 @@ def test_download_with_unknown_algorithm_raises_clear_error(self, client, tmp_pa
         # Patch download_with_hashes to return empty dict (no algorithms computed)
         with patch.object(client._http, "download_with_hashes", return_value={}):
             checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="abc123")]
-            with pytest.raises(TeaChecksumError, match="No computed digest"):
+            with pytest.raises(TeaChecksumError, match="No computed digest") as exc_info:
                 client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
             assert not dest.exists()
+            assert exc_info.value.algorithm == "SHA-256"
+
+    @responses.activate
+    def test_download_zero_byte_artifact(self, client, tmp_path):
+        """Zero-byte artifacts are valid (e.g. stub SBOMs)."""
+        responses.get(ARTIFACT_URL, body=b"")
+        sha256 = hashlib.sha256(b"").hexdigest()
+        checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)]
+        dest = tmp_path / "empty.json"
+        result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
+        assert result == dest
+        assert dest.read_bytes() == b""
+
+    @responses.activate
+    def test_download_multi_chunk_artifact(self, client, tmp_path):
+        """Content > 8192 bytes exercises multi-chunk hashing."""
+        content = b"A" * 20000
+        responses.get(ARTIFACT_URL, body=content)
+        sha256 = hashlib.sha256(content).hexdigest()
+        checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)]
+        dest = tmp_path / "large.json"
+        result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
+        assert result == dest
+        assert dest.read_bytes() == content
diff --git a/tests/test_http.py b/tests/test_http.py
index b24fc41..5897c34 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -1,14 +1,17 @@
 import hashlib
+import warnings
 from unittest.mock import patch
 
 import pytest
 import requests
 import responses
 
-from libtea._http import TeaHttpClient, _get_package_version
+from libtea._http import TeaHttpClient, _build_hashers, _get_package_version, _validate_download_url
 from libtea.exceptions import (
     TeaAuthenticationError,
+    TeaChecksumError,
     TeaConnectionError,
+    TeaInsecureTransportWarning,
     TeaNotFoundError,
     TeaRequestError,
     TeaServerError,
@@ -34,58 +37,64 @@ def test_get_json_with_bearer_token(self, base_url):
     @responses.activate
     def test_404_raises_not_found_with_error_type(self, http_client, base_url):
         responses.get(f"{base_url}/product/missing", json={"error": "OBJECT_UNKNOWN"}, status=404)
-        with pytest.raises(TeaNotFoundError) as exc_info:
+        with pytest.raises(TeaNotFoundError, match="HTTP 404") as exc_info:
             http_client.get_json("/product/missing")
         assert exc_info.value.error_type == "OBJECT_UNKNOWN"
 
     @responses.activate
     def test_404_with_object_not_shareable(self, http_client, base_url):
         responses.get(f"{base_url}/product/restricted", json={"error": "OBJECT_NOT_SHAREABLE"}, status=404)
-        with pytest.raises(TeaNotFoundError) as exc_info:
+        with pytest.raises(TeaNotFoundError, match="HTTP 404") as exc_info:
             http_client.get_json("/product/restricted")
         assert exc_info.value.error_type == "OBJECT_NOT_SHAREABLE"
 
     @responses.activate
     def test_404_with_non_json_body(self, http_client, base_url):
         responses.get(f"{base_url}/product/missing", body="Not Found", status=404)
-        with pytest.raises(TeaNotFoundError) as exc_info:
+        with pytest.raises(TeaNotFoundError, match="HTTP 404") as exc_info:
             http_client.get_json("/product/missing")
         assert exc_info.value.error_type is None
 
     @responses.activate
     def test_401_raises_auth_error(self, http_client, base_url):
         responses.get(f"{base_url}/product/abc", status=401)
-        with pytest.raises(TeaAuthenticationError):
+        with pytest.raises(TeaAuthenticationError, match="HTTP 401"):
             http_client.get_json("/product/abc")
 
     @responses.activate
     def test_403_raises_auth_error(self, http_client, base_url):
         responses.get(f"{base_url}/product/abc", status=403)
-        with pytest.raises(TeaAuthenticationError):
+        with pytest.raises(TeaAuthenticationError, match="HTTP 403"):
             http_client.get_json("/product/abc")
 
     @responses.activate
     def test_400_raises_request_error(self, http_client, base_url):
         responses.get(f"{base_url}/product/abc", status=400)
-        with pytest.raises(TeaRequestError):
+        with pytest.raises(TeaRequestError, match="HTTP 400"):
             http_client.get_json("/product/abc")
 
     @responses.activate
     def test_500_raises_server_error(self, http_client, base_url):
         responses.get(f"{base_url}/product/abc", status=500)
-        with pytest.raises(TeaServerError):
+        with pytest.raises(TeaServerError, match="HTTP 500"):
             http_client.get_json("/product/abc")
 
     @responses.activate
     def test_502_raises_server_error(self, http_client, base_url):
         responses.get(f"{base_url}/product/abc", status=502)
-        with pytest.raises(TeaServerError):
+        with pytest.raises(TeaServerError, match="HTTP 502"):
+            http_client.get_json("/product/abc")
+
+    @responses.activate
+    def test_3xx_raises_request_error(self, http_client, base_url):
+        responses.get(f"{base_url}/product/abc", status=301)
+        with pytest.raises(TeaRequestError, match="redirect.*HTTP 301"):
             http_client.get_json("/product/abc")
 
     @responses.activate
     def test_connection_error(self, http_client, base_url):
         responses.get(f"{base_url}/product/abc", body=requests.ConnectionError("refused"))
-        with pytest.raises(TeaConnectionError):
+        with pytest.raises(TeaConnectionError, match="refused"):
             http_client.get_json("/product/abc")
 
     @responses.activate
@@ -106,7 +115,7 @@ def test_stream_to_file(self, http_client, tmp_path):
     def test_download_cleans_up_partial_file_on_transport_error(self, http_client, tmp_path):
         responses.get("https://artifacts.example.com/sbom.xml", body=requests.ConnectionError("refused"))
         dest = tmp_path / "sbom.xml"
-        with pytest.raises(TeaConnectionError):
+        with pytest.raises(TeaConnectionError, match="refused"):
             http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest)
         assert not dest.exists()
 
@@ -158,10 +167,53 @@ def test_download_blake2b_256(self, http_client, tmp_path):
     def test_download_generic_exception_cleans_up(self, http_client, tmp_path):
         responses.get("https://artifacts.example.com/file.bin", status=500)
         dest = tmp_path / "file.bin"
-        with pytest.raises(TeaServerError):
+        with pytest.raises(TeaServerError, match="HTTP 500"):
             http_client.download_with_hashes(url="https://artifacts.example.com/file.bin", dest=dest)
         assert not dest.exists()
 
+    @responses.activate
+    def test_bearer_token_not_sent_to_artifact_url(self, tmp_path, base_url):
+        """The separate download session must NOT leak the bearer token to artifact hosts."""
+        artifact_url = "https://cdn.example.com/sbom.xml"
+        responses.get(artifact_url, body=b"content")
+        client = TeaHttpClient(base_url=base_url, token="secret-token")
+        client.download_with_hashes(url=artifact_url, dest=tmp_path / "f.xml")
+        assert "authorization" not in responses.calls[0].request.headers
+        client.close()
+
+    @responses.activate
+    def test_download_zero_byte_file(self, http_client, tmp_path):
+        responses.get("https://artifacts.example.com/empty.xml", body=b"")
+        dest = tmp_path / "empty.xml"
+        digests = http_client.download_with_hashes(
+            url="https://artifacts.example.com/empty.xml",
+            dest=dest,
+            algorithms=["SHA-256"],
+        )
+        assert dest.read_bytes() == b""
+        assert digests["SHA-256"] == hashlib.sha256(b"").hexdigest()
+
+    @responses.activate
+    def test_download_multi_chunk_file(self, http_client, tmp_path):
+        """Content larger than chunk_size (8192) exercises multi-chunk hashing."""
+        content = b"x" * 20000
+        responses.get("https://artifacts.example.com/large.bin", body=content)
+        dest = tmp_path / "large.bin"
+        digests = http_client.download_with_hashes(
+            url="https://artifacts.example.com/large.bin",
+            dest=dest,
+            algorithms=["SHA-256"],
+        )
+        assert dest.read_bytes() == content
+        assert digests["SHA-256"] == hashlib.sha256(content).hexdigest()
+
+    @responses.activate
+    def test_4xx_includes_response_body(self, http_client, base_url):
+        """4xx errors (other than 401/403/404) should include the response body."""
+        responses.get(f"{base_url}/product/abc", body="Bad request: missing field", status=422)
+        with pytest.raises(TeaRequestError, match="missing field"):
+            http_client.get_json("/product/abc")
+
 
 class TestBaseUrlValidation:
     def test_rejects_ftp_scheme(self):
@@ -176,10 +228,16 @@ def test_rejects_missing_hostname(self):
         with pytest.raises(ValueError, match="must include a hostname"):
             TeaHttpClient(base_url="http:///path/only")
 
-    def test_accepts_http(self):
-        client = TeaHttpClient(base_url="http://example.com/api")
-        assert client._base_url == "http://example.com/api"
-        client.close()
+    def test_http_without_token_warns(self):
+        with warnings.catch_warnings(record=True) as w:
+            warnings.simplefilter("always")
+            client = TeaHttpClient(base_url="http://example.com/api")
+            client.close()
+        assert any(issubclass(warning.category, TeaInsecureTransportWarning) for warning in w)
+
+    def test_http_with_token_raises(self):
+        with pytest.raises(ValueError, match="Cannot use bearer token with plaintext HTTP"):
+            TeaHttpClient(base_url="http://example.com/api", token="my-secret")
 
     def test_accepts_https(self):
         client = TeaHttpClient(base_url="https://example.com/api")
@@ -193,16 +251,133 @@ def test_strips_trailing_slash(self):
 
 
 class TestGetPackageVersion:
-    def test_fallback_to_tomllib(self):
-        with patch("importlib.metadata.version", side_effect=Exception("not installed")):
-            result = _get_package_version()
-            # Falls back to tomllib parsing of pyproject.toml
-            assert isinstance(result, str)
+    def test_returns_version_string(self):
+        result = _get_package_version()
+        assert isinstance(result, str)
+        assert result != ""
 
     def test_fallback_to_unknown(self):
-        with (
-            patch("importlib.metadata.version", side_effect=Exception("not installed")),
-            patch("tomllib.load", side_effect=Exception("parse error")),
-        ):
+        from importlib.metadata import PackageNotFoundError
+
+        with patch("importlib.metadata.version", side_effect=PackageNotFoundError("libtea")):
             result = _get_package_version()
             assert result == "unknown"
+
+
+class TestBuildHashers:
+    def test_blake3_raises(self):
+        with pytest.raises(TeaChecksumError, match="BLAKE3"):
+            _build_hashers(["BLAKE3"])
+
+    def test_unknown_algorithm_raises(self):
+        with pytest.raises(TeaChecksumError, match="Unsupported checksum algorithm"):
+            _build_hashers(["UNKNOWN-ALG"])
+
+    @pytest.mark.parametrize(
+        "algorithm",
+        ["MD5", "SHA-1", "SHA-256", "SHA-384", "SHA-512", "SHA3-256", "SHA3-384", "SHA3-512"],
+    )
+    def test_standard_algorithms(self, algorithm):
+        hashers = _build_hashers([algorithm])
+        assert algorithm in hashers
+        # Verify the hasher produces a hex digest
+        hashers[algorithm].update(b"test")
+        assert len(hashers[algorithm].hexdigest()) > 0
+
+    @pytest.mark.parametrize("algorithm,digest_size", [("BLAKE2b-256", 32), ("BLAKE2b-384", 48), ("BLAKE2b-512", 64)])
+    def test_blake2b_variants(self, algorithm, digest_size):
+        hashers = _build_hashers([algorithm])
+        assert algorithm in hashers
+        hashers[algorithm].update(b"test")
+        # BLAKE2b hex digest length = digest_size * 2
+        assert len(hashers[algorithm].hexdigest()) == digest_size * 2
+
+    @responses.activate
+    def test_all_algorithms_produce_correct_digests(self, tmp_path):
+        """End-to-end: download with each algorithm and verify the digest is correct."""
+        content = b"algorithm test content"
+        url = "https://artifacts.example.com/test.bin"
+        responses.get(url, body=content)
+
+        client = TeaHttpClient(base_url="https://api.example.com")
+        all_algs = ["MD5", "SHA-1", "SHA-256", "SHA-384", "SHA-512", "SHA3-256", "SHA3-384", "SHA3-512"]
+
+        dest = tmp_path / "test.bin"
+        digests = client.download_with_hashes(url=url, dest=dest, algorithms=all_algs)
+        client.close()
+
+        assert digests["MD5"] == hashlib.md5(content).hexdigest()
+        assert digests["SHA-1"] == hashlib.sha1(content).hexdigest()
+        assert digests["SHA-256"] == hashlib.sha256(content).hexdigest()
+        assert digests["SHA-384"] == hashlib.sha384(content).hexdigest()
+        assert digests["SHA-512"] == hashlib.sha512(content).hexdigest()
+        assert digests["SHA3-256"] == hashlib.new("sha3_256", content).hexdigest()
+        assert digests["SHA3-384"] == hashlib.new("sha3_384", content).hexdigest()
+        assert digests["SHA3-512"] == hashlib.new("sha3_512", content).hexdigest()
+
+
+class TestValidateDownloadUrl:
+    def test_rejects_file_scheme(self):
+        with pytest.raises(TeaValidationError, match="http or https scheme"):
+            _validate_download_url("file:///etc/passwd")
+
+    def test_rejects_ftp_scheme(self):
+        with pytest.raises(TeaValidationError, match="http or https scheme"):
+            _validate_download_url("ftp://evil.com/file")
+
+    def test_rejects_data_scheme(self):
+        with pytest.raises(TeaValidationError, match="http or https scheme"):
+            _validate_download_url("data:text/html,<script>hi</script>")
+
+    def test_rejects_gopher_scheme(self):
+        with pytest.raises(TeaValidationError, match="http or https scheme"):
+            _validate_download_url("gopher://evil.com")
+
+    def test_rejects_unknown_scheme(self):
+        with pytest.raises(TeaValidationError, match="http or https scheme"):
+            _validate_download_url("javascript:alert(1)")
+
+    def test_rejects_missing_hostname(self):
+        with pytest.raises(TeaValidationError, match="must include a hostname"):
+            _validate_download_url("http:///path/only")
+
+    def test_accepts_http(self):
+        _validate_download_url("http://example.com/file.xml")
+
+    def test_accepts_https(self):
+        _validate_download_url("https://cdn.example.com/sbom.json")
+
+
+class TestRequestExceptionCatchAll:
+    @responses.activate
+    def test_request_exception_in_get_json(self, http_client, base_url):
+        """RequestException subclasses beyond ConnectionError/Timeout are caught."""
+        responses.get(f"{base_url}/product/abc", body=requests.exceptions.TooManyRedirects("too many"))
+        with pytest.raises(TeaConnectionError, match="too many"):
+            http_client.get_json("/product/abc")
+
+    @responses.activate
+    def test_download_timeout_cleans_up(self, http_client, tmp_path):
+        """Timeout during download cleans up partial file."""
+        responses.get("https://artifacts.example.com/sbom.xml", body=requests.Timeout("timed out"))
+        dest = tmp_path / "sbom.xml"
+        with pytest.raises(TeaConnectionError, match="timed out"):
+            http_client.download_with_hashes(url="https://artifacts.example.com/sbom.xml", dest=dest)
+        assert not dest.exists()
+
+
+class TestEmptyBodyErrors:
+    @responses.activate
+    def test_4xx_with_empty_body(self, http_client, base_url):
+        """4xx with no body produces a clean error message."""
+        responses.get(f"{base_url}/product/abc", body="", status=422)
+        with pytest.raises(TeaRequestError, match="Client error: HTTP 422"):
+            http_client.get_json("/product/abc")
+
+    @responses.activate
+    def test_404_with_json_array_body(self, http_client, base_url):
+        """404 with non-dict
JSON body does not crash.""" + responses.get(f"{base_url}/product/abc", json=["not", "a", "dict"], status=404) + with pytest.raises(TeaNotFoundError) as exc_info: + http_client.get_json("/product/abc") + assert exc_info.value.error_type is None diff --git a/tests/test_integration.py b/tests/test_integration.py index 27c3d89..0c44d8f 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -4,7 +4,6 @@ from libtea.client import TeaClient from libtea.models import ArtifactType, ChecksumAlgorithm, IdentifierType -from tests.conftest import BASE_URL as BASE # Example JSON taken directly from the TEA OpenAPI spec LOG4J_PRODUCT = { @@ -84,22 +83,22 @@ class TestSpecExamples: @responses.activate - def test_full_consumer_flow(self): + def test_full_consumer_flow(self, base_url): """Test the full consumer flow: product -> component releases -> collection -> artifacts.""" product_uuid = LOG4J_PRODUCT["uuid"] release_uuid = TOMCAT_RELEASE["uuid"] - responses.get(f"{BASE}/product/{product_uuid}", json=LOG4J_PRODUCT) + responses.get(f"{base_url}/product/{product_uuid}", json=LOG4J_PRODUCT) responses.get( - f"{BASE}/componentRelease/{release_uuid}", + f"{base_url}/componentRelease/{release_uuid}", json={ "release": TOMCAT_RELEASE, "latestCollection": LOG4J_COLLECTION, }, ) - responses.get(f"{BASE}/componentRelease/{release_uuid}/collection/latest", json=LOG4J_COLLECTION) + responses.get(f"{base_url}/componentRelease/{release_uuid}/collection/latest", json=LOG4J_COLLECTION) - with TeaClient(base_url=BASE) as client: + with TeaClient(base_url=base_url) as client: # Step 1: Get product product = client.get_product(product_uuid) assert product.name == "Apache Log4j 2" From 9eec887f60f5fb49022d33a7c85423ab13c66ab4 Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Thu, 26 Feb 2026 00:00:43 +0300 Subject: [PATCH 14/17] Enhance documentation and validation in TEA API client - Expanded docstrings in `TeaHttpClient`, `TeaClient`, and models to include detailed argument 
descriptions, return types, and potential exceptions. - Improved validation in the `_validate_download_url` function to ensure proper URL formats. - Updated `parse_tei` and `fetch_well_known` functions in `discovery.py` to clarify their arguments and return values. - Added comprehensive docstrings for various models to enhance clarity on their purpose and usage. --- libtea/_http.py | 42 ++++++++++++- libtea/client.py | 149 +++++++++++++++++++++++++++++++++++++++++++- libtea/discovery.py | 36 ++++++++++- libtea/models.py | 72 ++++++++++++++++++++- 4 files changed, 290 insertions(+), 9 deletions(-) diff --git a/libtea/_http.py b/libtea/_http.py index 00779d1..8d94aef 100644 --- a/libtea/_http.py +++ b/libtea/_http.py @@ -91,7 +91,17 @@ def _validate_download_url(url: str) -> None: class TeaHttpClient: - """Low-level HTTP client for TEA API requests.""" + """Low-level HTTP client for TEA API requests. + + Handles authentication headers, error mapping, and streaming downloads. + Uses a separate unauthenticated session for artifact downloads to avoid + leaking bearer tokens to third-party hosts. + + Args: + base_url: TEA server base URL. + token: Optional bearer token. Rejected with plaintext HTTP. + timeout: Request timeout in seconds. + """ def __init__( self, @@ -121,7 +131,21 @@ def __init__( self._session.headers["authorization"] = f"Bearer {token}" def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: - """Send GET request and return parsed JSON.""" + """Send GET request and return parsed JSON. + + Args: + path: URL path relative to base URL (e.g. ``/product/{uuid}``). + params: Optional query parameters. + + Returns: + Parsed JSON response body. + + Raises: + TeaConnectionError: On network failure. + TeaNotFoundError: On HTTP 404. + TeaAuthenticationError: On HTTP 401/403. + TeaServerError: On HTTP 5xx. 
+ """ url = f"{self._base_url}{path}" try: response = self._session.get(url, params=params, timeout=self._timeout, allow_redirects=False) @@ -142,10 +166,22 @@ def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> Any: raise TeaValidationError(f"Invalid JSON in response: {exc}") from exc def download_with_hashes(self, url: str, dest: Path, algorithms: list[str] | None = None) -> dict[str, str]: - """Download a file and compute checksums on-the-fly. Returns {algorithm: hex_digest}. + """Download a file and compute checksums on-the-fly. Uses a separate unauthenticated session so that the bearer token is not leaked to third-party artifact hosts (CDNs, Maven Central, etc.). + + Args: + url: Direct download URL. + dest: Local file path to write to. Parent directories are created. + algorithms: Optional list of checksum algorithm names to compute. + + Returns: + Dict mapping algorithm name to hex digest string. + + Raises: + TeaConnectionError: On network failure. Partial files are deleted. + TeaChecksumError: If an unsupported algorithm is requested. """ _validate_download_url(url) hashers = _build_hashers(algorithms) if algorithms else {} diff --git a/libtea/client.py b/libtea/client.py index 02503db..5110a69 100644 --- a/libtea/client.py +++ b/libtea/client.py @@ -62,7 +62,13 @@ def _validate_path_segment(value: str, name: str = "uuid") -> str: class TeaClient: - """Synchronous client for the Transparency Exchange API.""" + """Synchronous client for the Transparency Exchange API. + + Args: + base_url: TEA server base URL (e.g. ``https://tea.example.com/v1``). + token: Optional bearer token for authentication. + timeout: Request timeout in seconds. + """ def __init__( self, @@ -91,6 +97,18 @@ def from_well_known( # --- Discovery --- def discover(self, tei: str) -> list[DiscoveryInfo]: + """Resolve a TEI to product release UUID(s) via the discovery endpoint. + + Args: + tei: TEI URN string (e.g. ``urn:tei:purl:example.com:pkg:pypi/lib@1.0``). 
+ + Returns: + List of discovery results, each with a product release UUID and servers. + + Raises: + TeaValidationError: If the response is malformed. + TeaConnectionError: On network failure. + """ # requests auto-encodes query params — do NOT pre-encode with quote() data = self._http.get_json("/discovery", params={"tei": tei}) return _validate_list(DiscoveryInfo, data) @@ -108,12 +126,30 @@ def search_products( return _validate(PaginatedProductResponse, data) def get_product(self, uuid: str) -> Product: + """Get a TEA product by UUID. + + Args: + uuid: Product UUID. + + Returns: + The product with its identifiers. + """ data = self._http.get_json(f"/product/{_validate_path_segment(uuid)}") return _validate(Product, data) def get_product_releases( self, uuid: str, *, page_offset: int = 0, page_size: int = 100 ) -> PaginatedProductReleaseResponse: + """Get paginated releases for a product. + + Args: + uuid: Product UUID. + page_offset: Zero-based page offset. + page_size: Number of results per page. + + Returns: + Paginated response containing product releases. + """ data = self._http.get_json( f"/product/{_validate_path_segment(uuid)}/releases", params={"pageOffset": page_offset, "pageSize": page_size}, @@ -133,52 +169,144 @@ def search_product_releases( return _validate(PaginatedProductReleaseResponse, data) def get_product_release(self, uuid: str) -> ProductRelease: + """Get a product release by UUID. + + Args: + uuid: Product release UUID. + + Returns: + The product release with its component references. + """ data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}") return _validate(ProductRelease, data) def get_product_release_collection_latest(self, uuid: str) -> Collection: + """Get the latest collection for a product release. + + Args: + uuid: Product release UUID. + + Returns: + The latest collection with its artifacts. 
+ """ data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}/collection/latest") return _validate(Collection, data) def get_product_release_collections(self, uuid: str) -> list[Collection]: + """Get all collection versions for a product release. + + Args: + uuid: Product release UUID. + + Returns: + List of all collection versions. + """ data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}/collections") return _validate_list(Collection, data) def get_product_release_collection(self, uuid: str, version: int) -> Collection: + """Get a specific collection version for a product release. + + Args: + uuid: Product release UUID. + version: Collection version number (starts at 1). + + Returns: + The requested collection version. + """ data = self._http.get_json(f"/productRelease/{_validate_path_segment(uuid)}/collection/{version}") return _validate(Collection, data) # --- Components --- def get_component(self, uuid: str) -> Component: + """Get a TEA component by UUID. + + Args: + uuid: Component UUID. + + Returns: + The component with its identifiers. + """ data = self._http.get_json(f"/component/{_validate_path_segment(uuid)}") return _validate(Component, data) def get_component_releases(self, uuid: str) -> list[Release]: + """Get all releases for a component. + + Unlike product releases, component releases are not paginated. + + Args: + uuid: Component UUID. + + Returns: + List of component releases. + """ data = self._http.get_json(f"/component/{_validate_path_segment(uuid)}/releases") return _validate_list(Release, data) # --- Component Releases --- def get_component_release(self, uuid: str) -> ComponentReleaseWithCollection: + """Get a component release with its latest collection. + + Args: + uuid: Component release UUID. + + Returns: + The release bundled with its latest collection of artifacts. 
+ """ data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}") return _validate(ComponentReleaseWithCollection, data) def get_component_release_collection_latest(self, uuid: str) -> Collection: + """Get the latest collection for a component release. + + Args: + uuid: Component release UUID. + + Returns: + The latest collection with its artifacts. + """ data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}/collection/latest") return _validate(Collection, data) def get_component_release_collections(self, uuid: str) -> list[Collection]: + """Get all collection versions for a component release. + + Args: + uuid: Component release UUID. + + Returns: + List of all collection versions. + """ data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}/collections") return _validate_list(Collection, data) def get_component_release_collection(self, uuid: str, version: int) -> Collection: + """Get a specific collection version for a component release. + + Args: + uuid: Component release UUID. + version: Collection version number (starts at 1). + + Returns: + The requested collection version. + """ data = self._http.get_json(f"/componentRelease/{_validate_path_segment(uuid)}/collection/{version}") return _validate(Collection, data) # --- Artifacts --- def get_artifact(self, uuid: str) -> Artifact: + """Get artifact metadata by UUID. + + Args: + uuid: Artifact UUID. + + Returns: + The artifact with its formats and download URLs. + """ data = self._http.get_json(f"/artifact/{_validate_path_segment(uuid)}") return _validate(Artifact, data) @@ -189,7 +317,24 @@ def download_artifact( *, verify_checksums: list[Checksum] | None = None, ) -> Path: - """Download an artifact file, optionally verifying checksums.""" + """Download an artifact file, optionally verifying checksums. + + Uses a separate unauthenticated session so the bearer token is not + leaked to third-party artifact hosts. 
+ + Args: + url: Direct download URL for the artifact. + dest: Local file path to write to. + verify_checksums: Optional list of checksums to verify after download. + On mismatch the downloaded file is deleted. + + Returns: + The destination path. + + Raises: + TeaChecksumError: If checksum verification fails. + TeaConnectionError: On network failure. + """ algorithms = [cs.alg_type.value for cs in verify_checksums] if verify_checksums else None computed = self._http.download_with_hashes(url, dest, algorithms=algorithms) diff --git a/libtea/discovery.py b/libtea/discovery.py index f67825f..bb8ae26 100644 --- a/libtea/discovery.py +++ b/libtea/discovery.py @@ -103,8 +103,16 @@ def __str__(self) -> str: def parse_tei(tei: str) -> tuple[str, str, str]: """Parse a TEI URN into (type, domain, identifier). - TEI format: urn:tei::: - The identifier may contain colons (e.g. hash type). + TEI format: ``urn:tei:::`` + + Args: + tei: TEI URN string. + + Returns: + Tuple of (type, domain, identifier). + + Raises: + TeaDiscoveryError: If the TEI format is invalid. """ parts = tei.split(":") if len(parts) < 5 or parts[0] != "urn" or parts[1] != "tei": @@ -123,7 +131,19 @@ def parse_tei(tei: str) -> tuple[str, str, str]: def fetch_well_known(domain: str, *, timeout: float = 10.0) -> TeaWellKnown: - """Fetch and parse the .well-known/tea document from a domain via HTTPS.""" + """Fetch and parse the .well-known/tea discovery document from a domain. + + Args: + domain: Domain name to resolve (e.g. ``tea.example.com``). + timeout: HTTP request timeout in seconds. + + Returns: + Parsed well-known document with endpoint list. + + Raises: + TeaDiscoveryError: If the domain is invalid, unreachable, or returns + an invalid document. 
+ """ if not domain or not _DOMAIN_RE.match(domain): raise TeaDiscoveryError(f"Invalid domain: {domain!r}") url = f"https://{domain}/.well-known/tea" @@ -162,6 +182,16 @@ def select_endpoint(well_known: TeaWellKnown, supported_version: str) -> TeaEndp Per TEA spec: uses SemVer 2.0.0 comparison to match versions, then prioritizes by highest matching version, with priority as tiebreaker. + + Args: + well_known: Parsed .well-known/tea document. + supported_version: SemVer version string the client supports. + + Returns: + The best matching endpoint. + + Raises: + TeaDiscoveryError: If no endpoint supports the requested version. """ target = _SemVer(supported_version) diff --git a/libtea/models.py b/libtea/models.py index 05b0ca6..1e71027 100644 --- a/libtea/models.py +++ b/libtea/models.py @@ -23,13 +23,21 @@ class _TeaModel(BaseModel): class IdentifierType(StrEnum): + """Identifier type used in product and component identifiers.""" + CPE = "CPE" TEI = "TEI" PURL = "PURL" - UDI = "UDI" + UDI = "UDI" # Not in spec's identifier-type enum; included for forward-compatibility class ChecksumAlgorithm(StrEnum): + """Checksum algorithm identifiers per TEA spec. + + Values use hyphen form (e.g. ``SHA-256``). The Checksum model's validator + normalizes underscore form (``SHA_256``) to hyphen form automatically. + """ + MD5 = "MD5" SHA_1 = "SHA-1" SHA_256 = "SHA-256" @@ -49,6 +57,8 @@ class ChecksumAlgorithm(StrEnum): class ArtifactType(StrEnum): + """Type of a TEA artifact (e.g. 
BOM, VEX, attestation).""" + ATTESTATION = "ATTESTATION" BOM = "BOM" BUILD_META = "BUILD_META" @@ -63,11 +73,15 @@ class ArtifactType(StrEnum): class CollectionBelongsTo(StrEnum): + """Whether a collection belongs to a component release or product release.""" + COMPONENT_RELEASE = "COMPONENT_RELEASE" PRODUCT_RELEASE = "PRODUCT_RELEASE" class CollectionUpdateReasonType(StrEnum): + """Reason for a collection version update.""" + INITIAL_RELEASE = "INITIAL_RELEASE" VEX_UPDATED = "VEX_UPDATED" ARTIFACT_UPDATED = "ARTIFACT_UPDATED" @@ -76,6 +90,8 @@ class CollectionUpdateReasonType(StrEnum): class ErrorType(StrEnum): + """TEA API error types returned in 404 responses.""" + OBJECT_UNKNOWN = "OBJECT_UNKNOWN" OBJECT_NOT_SHAREABLE = "OBJECT_NOT_SHAREABLE" @@ -84,11 +100,19 @@ class ErrorType(StrEnum): class Identifier(_TeaModel): + """An identifier with a specified type (e.g. PURL, CPE, TEI).""" + id_type: IdentifierType id_value: str class Checksum(_TeaModel): + """A checksum with algorithm type and hex value. + + The ``alg_type`` validator normalizes both hyphen form (``SHA-256``) and + underscore form (``SHA_256``) to the canonical hyphen form. + """ + alg_type: ChecksumAlgorithm alg_value: str @@ -111,6 +135,8 @@ def normalize_alg_type(cls, v: str) -> str: class ReleaseDistribution(_TeaModel): + """A distribution format for a component release (e.g. binary, source).""" + distribution_type: str description: str | None = None identifiers: list[Identifier] = [] @@ -120,6 +146,8 @@ class ReleaseDistribution(_TeaModel): class ArtifactFormat(_TeaModel): + """A TEA artifact in a specific format with download URL and checksums.""" + media_type: str description: str | None = None url: str @@ -128,6 +156,8 @@ class ArtifactFormat(_TeaModel): class Artifact(_TeaModel): + """A security-related artifact (e.g. 
SBOM, VEX, attestation) with available formats.""" + uuid: str name: str type: ArtifactType @@ -136,11 +166,19 @@ class Artifact(_TeaModel): class CollectionUpdateReason(_TeaModel): + """Reason for a collection version update, with optional comment.""" + type: CollectionUpdateReasonType comment: str | None = None class Collection(_TeaModel): + """A versioned collection of artifacts belonging to a release. + + The UUID matches the owning component or product release. The version + integer starts at 1 and increments on each content change. + """ + uuid: str version: int date: datetime | None = None @@ -150,17 +188,23 @@ class Collection(_TeaModel): class ComponentRef(_TeaModel): + """Reference to a TEA component, optionally pinned to a specific release.""" + uuid: str release: str | None = None class Component(_TeaModel): + """A TEA component (software lineage/family, not a specific version).""" + uuid: str name: str identifiers: list[Identifier] class Release(_TeaModel): + """A specific version of a TEA component with distributions and identifiers.""" + uuid: str component: str | None = None component_name: str | None = None @@ -173,17 +217,29 @@ class Release(_TeaModel): class ComponentReleaseWithCollection(_TeaModel): + """A component release bundled with its latest collection. + + Returned by ``GET /componentRelease/{uuid}``. + """ + release: Release latest_collection: Collection class Product(_TeaModel): + """A TEA product (optional grouping of components).""" + uuid: str name: str identifiers: list[Identifier] class ProductRelease(_TeaModel): + """A specific version of a TEA product with its component references. + + This is the primary entry point from TEI discovery. 
+ """ + uuid: str product: str | None = None product_name: str | None = None @@ -196,6 +252,8 @@ class ProductRelease(_TeaModel): class ErrorResponse(_TeaModel): + """Error response body from TEA API 404 responses.""" + error: ErrorType @@ -203,6 +261,8 @@ class ErrorResponse(_TeaModel): class PaginatedProductResponse(_TeaModel): + """Paginated response containing a list of products.""" + timestamp: datetime page_start_index: int page_size: int @@ -211,6 +271,8 @@ class PaginatedProductResponse(_TeaModel): class PaginatedProductReleaseResponse(_TeaModel): + """Paginated response containing a list of product releases.""" + timestamp: datetime page_start_index: int page_size: int @@ -222,22 +284,30 @@ class PaginatedProductReleaseResponse(_TeaModel): class TeaEndpoint(_TeaModel): + """A TEA server endpoint from the .well-known/tea discovery document.""" + url: str versions: list[str] = Field(min_length=1) priority: float | None = Field(default=None, ge=0, le=1) class TeaWellKnown(_TeaModel): + """The .well-known/tea discovery document listing available TEA endpoints.""" + schema_version: Literal[1] endpoints: list[TeaEndpoint] = Field(min_length=1) class TeaServerInfo(_TeaModel): + """TEA server info returned from the discovery API endpoint.""" + root_url: str versions: list[str] = Field(min_length=1) priority: float | None = Field(default=None, ge=0, le=1) class DiscoveryInfo(_TeaModel): + """Discovery result mapping a TEI to a product release and its servers.""" + product_release_uuid: str servers: list[TeaServerInfo] From 905b152db6bfaae1e5f49a0e6c8bd1a64265766e Mon Sep 17 00:00:00 2001 From: Rana Aurangzaib Date: Thu, 26 Feb 2026 00:40:55 +0300 Subject: [PATCH 15/17] Implement TeiType enumeration and refactor checksum handling - Introduced `TeiType` enumeration in `models.py` to define valid TEI URN scheme types. - Updated references in `discovery.py`, `client.py`, and tests to use the new `TeiType` enumeration for improved clarity and maintainability. 
- Refactored checksum handling in `Checksum` model to replace `alg_type` and `alg_value` with `algorithm_type` and `algorithm_value`, ensuring consistency across the codebase. - Enhanced tests to validate the new checksum structure and TEI type usage. --- libtea/__init__.py | 2 ++ libtea/client.py | 6 +++--- libtea/discovery.py | 4 ++-- libtea/models.py | 27 +++++++++++++++++++++----- tests/test_discovery.py | 4 ++-- tests/test_download.py | 24 ++++++++++------------- tests/test_integration.py | 2 +- tests/test_models.py | 41 +++++++++++++++++++++++++++++++-------- 8 files changed, 75 insertions(+), 35 deletions(-) diff --git a/libtea/__init__.py b/libtea/__init__.py index 96b122d..69a42ca 100644 --- a/libtea/__init__.py +++ b/libtea/__init__.py @@ -39,6 +39,7 @@ Release, ReleaseDistribution, TeaServerInfo, + TeiType, ) __version__ = version("libtea") @@ -78,5 +79,6 @@ "Release", "ReleaseDistribution", "TeaServerInfo", + "TeiType", "__version__", ] diff --git a/libtea/client.py b/libtea/client.py index 5110a69..c73b848 100644 --- a/libtea/client.py +++ b/libtea/client.py @@ -335,7 +335,7 @@ def download_artifact( TeaChecksumError: If checksum verification fails. TeaConnectionError: On network failure. 
""" - algorithms = [cs.alg_type.value for cs in verify_checksums] if verify_checksums else None + algorithms = [cs.algorithm_type.value for cs in verify_checksums] if verify_checksums else None computed = self._http.download_with_hashes(url, dest, algorithms=algorithms) if verify_checksums: @@ -347,8 +347,8 @@ def download_artifact( def _verify_checksums(checksums: list[Checksum], computed: dict[str, str], url: str, dest: Path) -> None: """Verify computed checksums against expected values, cleaning up on failure.""" for cs in checksums: - alg_name = cs.alg_type.value - expected = cs.alg_value.lower() + alg_name = cs.algorithm_type.value + expected = cs.algorithm_value.lower() if alg_name not in computed: dest.unlink(missing_ok=True) raise TeaChecksumError( diff --git a/libtea/discovery.py b/libtea/discovery.py index bb8ae26..502d23c 100644 --- a/libtea/discovery.py +++ b/libtea/discovery.py @@ -9,7 +9,7 @@ from libtea._http import USER_AGENT from libtea.exceptions import TeaDiscoveryError -from libtea.models import TeaEndpoint, TeaWellKnown +from libtea.models import TeaEndpoint, TeaWellKnown, TeiType _SEMVER_RE = re.compile(r"^(?P\d+)\.(?P\d+)(?:\.(?P\d+))?(?:-(?P
[0-9A-Za-z.-]+))?$")
 
@@ -94,7 +94,7 @@ def __str__(self) -> str:
 
 logger = logging.getLogger("libtea")
 
-_VALID_TEI_TYPES = frozenset({"uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"})
+_VALID_TEI_TYPES = frozenset(e.value for e in TeiType)
 _DOMAIN_RE = re.compile(
     r"^[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?)*$"
 )
diff --git a/libtea/models.py b/libtea/models.py
index 1e71027..fe661b5 100644
--- a/libtea/models.py
+++ b/libtea/models.py
@@ -31,6 +31,23 @@ class IdentifierType(StrEnum):
     UDI = "UDI"  # Not in spec's identifier-type enum; included for forward-compatibility
 
 
+class TeiType(StrEnum):
+    """TEI URN scheme types per TEA discovery specification.
+
+    These are the valid ``<type>`` values in a TEI URN
+    (``urn:tei:<type>:<domain>:<identifier>``).
+    """
+
+    UUID = "uuid"
+    PURL = "purl"
+    HASH = "hash"
+    SWID = "swid"
+    EANUPC = "eanupc"
+    GTIN = "gtin"
+    ASIN = "asin"
+    UDI = "udi"
+
+
 class ChecksumAlgorithm(StrEnum):
     """Checksum algorithm identifiers per TEA spec.
 
@@ -109,16 +126,16 @@ class Identifier(_TeaModel):
 class Checksum(_TeaModel):
     """A checksum with algorithm type and hex value.
 
-    The ``alg_type`` validator normalizes both hyphen form (``SHA-256``) and
+    The ``algorithm_type`` validator normalizes both hyphen form (``SHA-256``) and
     underscore form (``SHA_256``) to the canonical hyphen form.
     """
 
-    alg_type: ChecksumAlgorithm
-    alg_value: str
+    algorithm_type: ChecksumAlgorithm = Field(alias="algType")
+    algorithm_value: str = Field(alias="algValue")
 
-    @field_validator("alg_type", mode="before")
+    @field_validator("algorithm_type", mode="before")
     @classmethod
-    def normalize_alg_type(cls, v: str) -> str:
+    def normalize_algorithm_type(cls, v: str) -> str:
         """Normalize underscore form (SHA_256) to hyphen form (SHA-256).
 
         Uses member-name lookup instead of blind replace to handle
diff --git a/tests/test_discovery.py b/tests/test_discovery.py
index fc69933..c1db9d0 100644
--- a/tests/test_discovery.py
+++ b/tests/test_discovery.py
@@ -5,7 +5,7 @@
 
 from libtea.discovery import _SemVer, fetch_well_known, parse_tei, select_endpoint
 from libtea.exceptions import TeaDiscoveryError
-from libtea.models import TeaEndpoint, TeaWellKnown
+from libtea.models import TeaEndpoint, TeaWellKnown, TeiType
 
 
 class TestParseTei:
@@ -50,7 +50,7 @@ def test_invalid_tei_unknown_type(self):
         with pytest.raises(TeaDiscoveryError, match="Invalid TEI type"):
             parse_tei("urn:tei:unknown:example.com:some-id")
 
-    @pytest.mark.parametrize("tei_type", ["uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"])
+    @pytest.mark.parametrize("tei_type", [e.value for e in TeiType])
     def test_all_valid_tei_types(self, tei_type):
         result_type, domain, identifier = parse_tei(f"urn:tei:{tei_type}:example.com:some-id")
         assert result_type == tei_type
diff --git a/tests/test_download.py b/tests/test_download.py
index 222eb54..dcc20a1 100644
--- a/tests/test_download.py
+++ b/tests/test_download.py
@@ -23,7 +23,7 @@ def test_download_without_checksum(self, client, tmp_path):
     def test_download_with_valid_checksum(self, client, tmp_path):
         responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT)
         sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest()
-        checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)]
+        checksums = [Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value=sha256)]
         dest = tmp_path / "sbom.json"
         result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
         assert result == dest
@@ -32,7 +32,7 @@ def test_download_with_valid_checksum(self, client, tmp_path):
     @responses.activate
     def test_download_with_invalid_checksum_deletes_file(self, client, tmp_path):
         responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT)
-        checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="badhash")]
+        checksums = [Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value="badhash")]
         dest = tmp_path / "sbom.json"
         with pytest.raises(TeaChecksumError, match="SHA-256") as exc_info:
             client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
@@ -47,8 +47,8 @@ def test_download_with_multiple_checksums(self, client, tmp_path):
         sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest()
         sha1 = hashlib.sha1(ARTIFACT_CONTENT).hexdigest()
         checksums = [
-            Checksum(alg_type=ChecksumAlgorithm.SHA_1, alg_value=sha1),
-            Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256),
+            Checksum(algorithm_type=ChecksumAlgorithm.SHA_1, algorithm_value=sha1),
+            Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value=sha256),
         ]
         dest = tmp_path / "sbom.json"
         result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
@@ -58,29 +58,25 @@ def test_download_with_multiple_checksums(self, client, tmp_path):
     def test_download_checksum_uppercase_hex_accepted(self, client, tmp_path):
         responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT)
         sha256 = hashlib.sha256(ARTIFACT_CONTENT).hexdigest().upper()
-        checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)]
+        checksums = [Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value=sha256)]
         dest = tmp_path / "sbom.json"
         result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
         assert result == dest
 
     def test_download_with_blake3_raises_clear_error(self, client, tmp_path):
-        checksums = [Checksum(alg_type=ChecksumAlgorithm.BLAKE3, alg_value="somevalue")]
+        checksums = [Checksum(algorithm_type=ChecksumAlgorithm.BLAKE3, algorithm_value="somevalue")]
         dest = tmp_path / "sbom.json"
         with pytest.raises(TeaChecksumError, match="BLAKE3") as exc_info:
             client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
         assert exc_info.value.algorithm == "BLAKE3"
 
-    @responses.activate
     def test_download_with_unknown_algorithm_raises_clear_error(self, client, tmp_path):
         """If an algorithm has no hashlib mapping, verification should raise explicitly."""
-        responses.get(ARTIFACT_URL, body=ARTIFACT_CONTENT)
-        checksums = [Checksum(alg_type=ChecksumAlgorithm.BLAKE3, alg_value="abc123")]
-        dest = tmp_path / "sbom.json"
         from unittest.mock import patch
 
-        # Patch download_with_hashes to return empty dict (no algorithms computed)
+        dest = tmp_path / "sbom.json"
+        checksums = [Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value="abc123")]
         with patch.object(client._http, "download_with_hashes", return_value={}):
-            checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="abc123")]
             with pytest.raises(TeaChecksumError, match="No computed digest") as exc_info:
                 client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
             assert not dest.exists()
@@ -91,7 +87,7 @@ def test_download_zero_byte_artifact(self, client, tmp_path):
         """Zero-byte artifacts are valid (e.g. stub SBOMs)."""
         responses.get(ARTIFACT_URL, body=b"")
         sha256 = hashlib.sha256(b"").hexdigest()
-        checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)]
+        checksums = [Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value=sha256)]
         dest = tmp_path / "empty.json"
         result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
         assert result == dest
@@ -103,7 +99,7 @@ def test_download_multi_chunk_artifact(self, client, tmp_path):
         content = b"A" * 20000
         responses.get(ARTIFACT_URL, body=content)
         sha256 = hashlib.sha256(content).hexdigest()
-        checksums = [Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value=sha256)]
+        checksums = [Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value=sha256)]
         dest = tmp_path / "large.json"
         result = client.download_artifact(ARTIFACT_URL, dest, verify_checksums=checksums)
         assert result == dest
diff --git a/tests/test_integration.py b/tests/test_integration.py
index 0c44d8f..2263c17 100644
--- a/tests/test_integration.py
+++ b/tests/test_integration.py
@@ -108,7 +108,7 @@ def test_full_consumer_flow(self, base_url):
             cr = client.get_component_release(release_uuid)
             assert cr.release.version == "11.0.7"
             assert cr.release.distributions[0].distribution_type == "zip"
-            assert cr.release.distributions[0].checksums[0].alg_type == ChecksumAlgorithm.SHA_256
+            assert cr.release.distributions[0].checksums[0].algorithm_type == ChecksumAlgorithm.SHA_256
 
             # Step 3: Get latest collection
             collection = client.get_component_release_collection_latest(release_uuid)
diff --git a/tests/test_models.py b/tests/test_models.py
index 52517fc..7248ab9 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -15,6 +15,7 @@
     PaginatedProductResponse,
     Product,
     Release,
+    TeiType,
 )
 
 
@@ -44,6 +45,16 @@ def test_collection_update_reason_type(self):
         assert CollectionUpdateReasonType.VEX_UPDATED == "VEX_UPDATED"
 
 
+class TestTeiType:
+    def test_all_members(self):
+        expected = {"uuid", "purl", "hash", "swid", "eanupc", "gtin", "asin", "udi"}
+        assert {e.value for e in TeiType} == expected
+
+    def test_is_strenum(self):
+        assert isinstance(TeiType.UUID, str)
+        assert TeiType.UUID == "uuid"
+
+
 class TestSharedTypes:
     def test_identifier_from_json(self):
         data = {"idType": "PURL", "idValue": "pkg:maven/org.apache/log4j"}
@@ -59,14 +70,14 @@ def test_identifier_to_json(self):
     def test_checksum_from_json(self):
         data = {"algType": "SHA-256", "algValue": "abcdef1234567890"}
         cs = Checksum.model_validate(data)
-        assert cs.alg_type == ChecksumAlgorithm.SHA_256
-        assert cs.alg_value == "abcdef1234567890"
+        assert cs.algorithm_type == ChecksumAlgorithm.SHA_256
+        assert cs.algorithm_value == "abcdef1234567890"
 
     def test_checksum_underscore_normalization(self):
         """Servers may use SHA_256 (underscore) instead of SHA-256 (hyphen)."""
         data = {"algType": "SHA_256", "algValue": "abcdef1234567890"}
         cs = Checksum.model_validate(data)
-        assert cs.alg_type == ChecksumAlgorithm.SHA_256
+        assert cs.algorithm_type == ChecksumAlgorithm.SHA_256
 
     def test_enum_is_strenum(self):
         assert isinstance(IdentifierType.CPE, str)
@@ -74,14 +85,24 @@ def test_enum_is_strenum(self):
         assert isinstance(ArtifactType.BOM, str)
 
     def test_checksum_to_json(self):
-        cs = Checksum(alg_type=ChecksumAlgorithm.SHA_256, alg_value="abcdef1234567890")
+        cs = Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value="abcdef1234567890")
         data = cs.model_dump(by_alias=True)
         assert data == {"algType": "SHA-256", "algValue": "abcdef1234567890"}
 
+    def test_checksum_json_round_trip(self):
+        cs = Checksum(algorithm_type=ChecksumAlgorithm.SHA_256, algorithm_value="abcdef1234567890")
+        json_str = cs.model_dump_json(by_alias=True)
+        restored = Checksum.model_validate_json(json_str)
+        assert restored == cs
+
     def test_populate_by_name(self):
         ident = Identifier.model_validate({"id_type": "TEI", "id_value": "tei:example"})
         assert ident.id_type == IdentifierType.TEI
 
+    def test_checksum_populate_by_name(self):
+        cs = Checksum.model_validate({"algorithm_type": "SHA-256", "algorithm_value": "abcdef"})
+        assert cs.algorithm_type == ChecksumAlgorithm.SHA_256
+
     def test_extra_fields_ignored(self):
         cs = Checksum.model_validate({"algType": "SHA-256", "algValue": "deadbeef", "extra": "ignored"})
         assert not hasattr(cs, "extra")
@@ -105,12 +126,12 @@ class TestChecksumNormalization:
     )
     def test_underscore_to_value(self, raw, expected_member):
         cs = Checksum.model_validate({"algType": raw, "algValue": "aabbcc"})
-        assert cs.alg_type == expected_member
+        assert cs.algorithm_type == expected_member
 
     def test_valid_values_pass_through(self):
         for member in ChecksumAlgorithm:
             cs = Checksum.model_validate({"algType": member.value, "algValue": "aabbcc"})
-            assert cs.alg_type == member
+            assert cs.algorithm_type == member
 
 
 class TestValidationErrors:
@@ -122,7 +143,11 @@ def test_identifier_rejects_unknown_type(self):
         with pytest.raises(ValidationError):
             Identifier.model_validate({"idType": "SPDXID", "idValue": "some-value"})
 
-    def test_checksum_rejects_missing_alg_value(self):
+    def test_checksum_rejects_missing_algorithm_type(self):
+        with pytest.raises(ValidationError):
+            Checksum.model_validate({"algValue": "abcdef1234567890"})
+
+    def test_checksum_rejects_missing_algorithm_value(self):
         with pytest.raises(ValidationError):
             Checksum.model_validate({"algType": "SHA-256"})
 
@@ -233,7 +258,7 @@ def test_release_from_json(self):
         release = Release.model_validate(data)
         assert release.version == "11.0.7"
         assert release.distributions[0].distribution_type == "zip"
-        assert release.distributions[0].checksums[0].alg_type == ChecksumAlgorithm.SHA_256
+        assert release.distributions[0].checksums[0].algorithm_type == ChecksumAlgorithm.SHA_256
 
 
 class TestCollection:

From 0b118aa6d54ff9178af925546e4023e25d41cd2b Mon Sep 17 00:00:00 2001
From: Rana Aurangzaib 
Date: Thu, 26 Feb 2026 01:05:45 +0300
Subject: [PATCH 16/17] Update dependencies, enhance README, and improve
 validation in API client

- Updated `requests` and `pydantic` dependencies to their latest versions in `pyproject.toml` and `uv.lock`.
- Enhanced README to clarify artifact download features, including checksum verification methods and bearer token isolation.
- Improved validation in the API client to reject non-list responses in `_validate_list`, ensuring robust error handling.
- Added tests to validate the new list validation logic, enhancing overall test coverage.
---
 .github/workflows/ci.yaml     |  4 +-
 .github/workflows/codeql.yaml |  8 ++--
 .github/workflows/pypi.yaml   | 12 +++---
 README.md                     | 71 ++++++++++++++++++++++++++++-------
 libtea/client.py              |  4 +-
 pyproject.toml                | 14 +++----
 tests/test_client.py          |  6 +++
 uv.lock                       | 66 ++++++++++++++++----------------
 8 files changed, 119 insertions(+), 66 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 4a50191..b0b7a64 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -12,8 +12,8 @@ jobs:
       matrix:
         python-version: ["3.11", "3.12", "3.13"]
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+      - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0
       - run: uv python install ${{ matrix.python-version }}
       - run: uv sync
       - run: uv run ruff check .
diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml
index 3201701..ebbadff 100644
--- a/.github/workflows/codeql.yaml
+++ b/.github/workflows/codeql.yaml
@@ -16,15 +16,15 @@ jobs:
       matrix:
         language: [python]
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
 
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
+        uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
         with:
           languages: ${{ matrix.language }}
 
       - name: Autobuild
-        uses: github/codeql-action/autobuild@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
+        uses: github/codeql-action/autobuild@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
+        uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
diff --git a/.github/workflows/pypi.yaml b/.github/workflows/pypi.yaml
index 87be0b8..c02fb59 100644
--- a/.github/workflows/pypi.yaml
+++ b/.github/workflows/pypi.yaml
@@ -17,9 +17,9 @@ jobs:
     permissions:
       id-token: write
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
 
-      - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+      - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0
 
       - name: Determine version
         id: version
@@ -37,7 +37,7 @@ jobs:
         run: uv build
 
       - name: Publish to TestPyPI
-        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b54f72e44af3222236a5b286 # release/v1
+        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
         with:
           repository-url: https://test.pypi.org/legacy/
 
@@ -50,12 +50,12 @@ jobs:
     permissions:
       id-token: write
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
 
-      - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+      - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0
 
       - name: Build package
         run: uv build
 
       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b54f72e44af3222236a5b286 # release/v1
+        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
diff --git a/README.md b/README.md
index e4f9c5d..2d7da43 100644
--- a/README.md
+++ b/README.md
@@ -18,9 +18,10 @@ TEA is an open standard for discovering and retrieving software transparency art
 - Auto-discovery via `.well-known/tea` and TEI URNs
 - Products, components, releases, and versioned collections
 - Search by PURL, CPE, or TEI identifier
-- Artifact download with on-the-fly checksum verification
+- Artifact download with on-the-fly checksum verification (MD5 through BLAKE2b)
 - Typed Pydantic v2 models with full camelCase/snake_case conversion
 - Structured exception hierarchy with error context
+- Bearer token isolation — tokens are never sent to artifact download hosts
 
 ## Installation
 
@@ -48,7 +49,22 @@ with TeaClient.from_well_known("example.com", token="your-bearer-token") as clie
 Or connect directly to a known endpoint:
 
 ```python
-client = TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2")
+client = TeaClient(
+    base_url="https://api.example.com/tea/v0.3.0-beta.2",
+    token="your-bearer-token",
+    timeout=30.0,
+)
+```
+
+Using `from_well_known`, you can also override the spec version and timeout:
+
+```python
+client = TeaClient.from_well_known(
+    "example.com",
+    token="your-bearer-token",
+    timeout=15.0,
+    version="0.3.0-beta.2",  # default
+)
 ```
 
 ## Usage
@@ -62,8 +78,11 @@ with TeaClient.from_well_known("example.com") as client:
     for product in results.results:
         print(product.name, product.uuid)
 
-    # Search product releases
-    releases = client.search_product_releases("PURL", "pkg:pypi/requests@2.31.0")
+    # Search product releases (with pagination)
+    releases = client.search_product_releases(
+        "PURL", "pkg:pypi/requests@2.31.0",
+        page_offset=0, page_size=100,
+    )
     print(releases.total_results)
 ```
 
@@ -77,12 +96,21 @@ with TeaClient.from_well_known("example.com") as client:
     releases = client.get_product_releases("product-uuid", page_size=25)
     for release in releases.results:
         print(release.version, release.created_date)
+
+    # Single product release
+    pr = client.get_product_release("release-uuid")
+    print(pr.version, pr.components)
+
+    # Product release collections
+    latest = client.get_product_release_collection_latest("release-uuid")
+    all_versions = client.get_product_release_collections("release-uuid")
+    specific = client.get_product_release_collection("release-uuid", 3)
 ```
 
 ### Components
 
 ```python
-with TeaClient(base_url="https://api.example.com/tea/v1") as client:
+with TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") as client:
     component = client.get_component("component-uuid")
     releases = client.get_component_releases("component-uuid")
 
@@ -94,11 +122,14 @@ with TeaClient(base_url="https://api.example.com/tea/v1") as client:
 ### Collections and artifacts
 
 ```python
-with TeaClient(base_url="https://api.example.com/tea/v1") as client:
+with TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") as client:
     collection = client.get_component_release_collection_latest("release-uuid")
     for artifact in collection.artifacts:
         print(artifact.name, artifact.type)
 
+    # All collection versions for a component release
+    all_versions = client.get_component_release_collections("release-uuid")
+
     # Specific collection version
     collection_v3 = client.get_component_release_collection("release-uuid", 3)
 ```
@@ -108,35 +139,46 @@ with TeaClient(base_url="https://api.example.com/tea/v1") as client:
 ```python
 from pathlib import Path
 
-with TeaClient(base_url="https://api.example.com/tea/v1") as client:
+with TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") as client:
     artifact = client.get_artifact("artifact-uuid")
     fmt = artifact.formats[0]
 
-    # Downloads and verifies checksums on-the-fly
-    client.download_artifact(
+    # Downloads and verifies checksums on-the-fly; returns the dest path
+    path = client.download_artifact(
         fmt.url,
         Path("sbom.json"),
         verify_checksums=fmt.checksums,
     )
 ```
 
+Supported checksum algorithms: MD5, SHA-1, SHA-256, SHA-384, SHA-512, SHA3-256, SHA3-384, SHA3-512, BLAKE2b-256, BLAKE2b-384, BLAKE2b-512. BLAKE3 is recognized in the model but not verifiable (Python's `hashlib` has no BLAKE3 support — a clear error is raised).
+
+Artifact downloads use a separate unauthenticated HTTP session so the bearer token is never leaked to third-party hosts (CDNs, Maven Central, etc.). On checksum mismatch, the downloaded file is automatically deleted.
+
 ### Discovery
 
 ```python
-from libtea.discovery import parse_tei
+from libtea.discovery import parse_tei, fetch_well_known, select_endpoint
 
 # Parse a TEI URN
 tei_type, domain, identifier = parse_tei(
     "urn:tei:purl:cyclonedx.org:pkg:pypi/cyclonedx-python-lib@8.4.0"
 )
 
+# Low-level: fetch and select an endpoint manually
+well_known = fetch_well_known("example.com")
+endpoint = select_endpoint(well_known, "0.3.0-beta.2")
+print(endpoint.url, endpoint.priority)
+
 # Discover product releases by TEI
-with TeaClient(base_url="https://api.example.com/tea/v1") as client:
+with TeaClient(base_url="https://api.example.com/tea/v0.3.0-beta.2") as client:
     results = client.discover("urn:tei:uuid:example.com:d4d9f54a-abcf-11ee-ac79-1a52914d44b")
     for info in results:
         print(info.product_release_uuid, info.servers)
 ```
 
+Supported TEI types: `uuid`, `purl`, `hash`, `swid`, `eanupc`, `gtin`, `asin`, `udi`.
+
 ## Error handling
 
 All exceptions inherit from `TeaError`:
@@ -164,12 +206,15 @@ Exception hierarchy:
 | `TeaDiscoveryError` | Invalid TEI, `.well-known` failure, or no compatible endpoint |
 | `TeaChecksumError` | Checksum mismatch (`.algorithm`, `.expected`, `.actual`) |
 | `TeaValidationError` | Malformed server response |
+| `TeaInsecureTransportWarning` | Warning emitted when using plaintext HTTP |
+
+Using a bearer token over plaintext HTTP raises `ValueError` immediately — HTTPS is required for authenticated requests.
 
 ## Requirements
 
 - Python >= 3.11
-- [requests](https://requests.readthedocs.io/) for HTTP
-- [Pydantic](https://docs.pydantic.dev/) v2 for data models
+- [requests](https://requests.readthedocs.io/) >= 2.31.0 for HTTP
+- [Pydantic](https://docs.pydantic.dev/) >= 2.1.0 for data models
 
 ## Not yet supported
 
diff --git a/libtea/client.py b/libtea/client.py
index c73b848..066d25c 100644
--- a/libtea/client.py
+++ b/libtea/client.py
@@ -44,8 +44,10 @@ def _validate(model_cls: type[_M], data: Any) -> _M:
         raise TeaValidationError(f"Invalid {model_cls.__name__} response: {exc}") from exc
 
 
-def _validate_list(model_cls: type[_M], data: list[Any]) -> list[_M]:
+def _validate_list(model_cls: type[_M], data: Any) -> list[_M]:
     """Validate a list of items against a Pydantic model."""
+    if not isinstance(data, list):
+        raise TeaValidationError(f"Expected list for {model_cls.__name__}, got {type(data).__name__}")
     try:
         return [model_cls.model_validate(item) for item in data]
     except ValidationError as exc:
diff --git a/pyproject.toml b/pyproject.toml
index f1a3f4c..1de2b0f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,8 +20,8 @@ classifiers = [
     "Topic :: Software Development :: Libraries :: Python Modules",
 ]
 dependencies = [
-    "requests>=2.31.0,<3",
-    "pydantic>=2.1.0,<3",
+    "requests>=2.32.0,<3",
+    "pydantic>=2.12.0,<3",
 ]
 
 [project.urls]
@@ -33,11 +33,11 @@ Changelog = "https://github.com/sbomify/py-libtea/releases"
 
 [dependency-groups]
 dev = [
-    "pytest>=8.0.0,<9",
-    "pytest-cov>=4.1.0,<5",
-    "ruff>=0.12.0,<0.13",
-    "pre-commit>=4.2.0,<5",
-    "responses>=0.25.0,<1",
+    "pytest>=9.0.0,<10",
+    "pytest-cov>=7.0.0,<8",
+    "ruff>=0.15.0,<0.16",
+    "pre-commit>=4.5.0,<5",
+    "responses>=0.26.0,<1",
 ]
 
 [tool.hatch.build.targets.wheel]
diff --git a/tests/test_client.py b/tests/test_client.py
index b01691d..736339a 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -419,6 +419,12 @@ def test_validate_list_raises_tea_validation_error(self, client, base_url):
         with pytest.raises(TeaValidationError, match="Invalid Release response"):
             client.get_component_releases("comp-1")
 
+    @responses.activate
+    def test_validate_list_rejects_non_list_response(self, client, base_url):
+        responses.get(f"{base_url}/component/comp-1/releases", json={"not": "a list"})
+        with pytest.raises(TeaValidationError, match="Expected list"):
+            client.get_component_releases("comp-1")
+
 
 class TestValidatePathSegment:
     def test_accepts_uuid(self):
diff --git a/uv.lock b/uv.lock
index 84a3a7c..3d65f79 100644
--- a/uv.lock
+++ b/uv.lock
@@ -280,17 +280,17 @@ dev = [
 
 [package.metadata]
 requires-dist = [
-    { name = "pydantic", specifier = ">=2.1.0,<3" },
-    { name = "requests", specifier = ">=2.31.0,<3" },
+    { name = "pydantic", specifier = ">=2.12.0,<3" },
+    { name = "requests", specifier = ">=2.32.0,<3" },
 ]
 
 [package.metadata.requires-dev]
 dev = [
-    { name = "pre-commit", specifier = ">=4.2.0,<5" },
-    { name = "pytest", specifier = ">=8.0.0,<9" },
-    { name = "pytest-cov", specifier = ">=4.1.0,<5" },
-    { name = "responses", specifier = ">=0.25.0,<1" },
-    { name = "ruff", specifier = ">=0.12.0,<0.13" },
+    { name = "pre-commit", specifier = ">=4.5.0,<5" },
+    { name = "pytest", specifier = ">=9.0.0,<10" },
+    { name = "pytest-cov", specifier = ">=7.0.0,<8" },
+    { name = "responses", specifier = ">=0.26.0,<1" },
+    { name = "ruff", specifier = ">=0.15.0,<0.16" },
 ]
 
 [[package]]
@@ -468,7 +468,7 @@ wheels = [
 
 [[package]]
 name = "pytest"
-version = "8.4.2"
+version = "9.0.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "colorama", marker = "sys_platform == 'win32'" },
@@ -477,22 +477,23 @@ dependencies = [
     { name = "pluggy" },
     { name = "pygments" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
 ]
 
 [[package]]
 name = "pytest-cov"
-version = "4.1.0"
+version = "7.0.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "coverage", extra = ["toml"] },
+    { name = "pluggy" },
     { name = "pytest" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/7a/15/da3df99fd551507694a9b01f512a2f6cf1254f33601605843c3775f39460/pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", size = 63245, upload-time = "2023-05-24T18:44:56.845Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a", size = 21949, upload-time = "2023-05-24T18:44:54.079Z" },
+    { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
 ]
 
 [[package]]
@@ -581,28 +582,27 @@ wheels = [
 
 [[package]]
 name = "ruff"
-version = "0.12.12"
+version = "0.15.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/06/04/eab13a954e763b0606f460443fcbf6bb5a0faf06890ea3754ff16523dce5/ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342", size = 4558148, upload-time = "2026-02-19T22:32:20.271Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" },
-    { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" },
-    { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" },
-    { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" },
-    { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" },
-    { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" },
-    { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" },
-    { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" },
-    { url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" },
-    { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" },
-    { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" },
-    { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" },
-    { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" },
-    { url = "https://files.pythonhosted.org/packages/c8/fa/05b6428a008e60f79546c943e54068316f32ec8ab5c4f73e4563934fbdc7/ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1", size = 12052870, upload-time = "2025-09-04T16:50:09.121Z" },
-    { url = "https://files.pythonhosted.org/packages/85/60/d1e335417804df452589271818749d061b22772b87efda88354cf35cdb7a/ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d", size = 13178016, upload-time = "2025-09-04T16:50:12.559Z" },
-    { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" },
+    { url = "https://files.pythonhosted.org/packages/2f/70/3a4dc6d09b13cb3e695f28307e5d889b2e1a66b7af9c5e257e796695b0e6/ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d", size = 10430565, upload-time = "2026-02-19T22:32:41.824Z" },
+    { url = "https://files.pythonhosted.org/packages/71/0b/bb8457b56185ece1305c666dc895832946d24055be90692381c31d57466d/ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e", size = 10820354, upload-time = "2026-02-19T22:32:07.366Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/c1/e0532d7f9c9e0b14c46f61b14afd563298b8b83f337b6789ddd987e46121/ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87", size = 10170767, upload-time = "2026-02-19T22:32:13.188Z" },
+    { url = "https://files.pythonhosted.org/packages/47/e8/da1aa341d3af017a21c7a62fb5ec31d4e7ad0a93ab80e3a508316efbcb23/ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9", size = 10529591, upload-time = "2026-02-19T22:32:02.547Z" },
+    { url = "https://files.pythonhosted.org/packages/93/74/184fbf38e9f3510231fbc5e437e808f0b48c42d1df9434b208821efcd8d6/ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80", size = 10260771, upload-time = "2026-02-19T22:32:36.938Z" },
+    { url = "https://files.pythonhosted.org/packages/05/ac/605c20b8e059a0bc4b42360414baa4892ff278cec1c91fff4be0dceedefd/ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f", size = 11045791, upload-time = "2026-02-19T22:32:31.642Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/52/db6e419908f45a894924d410ac77d64bdd98ff86901d833364251bd08e22/ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77", size = 11879271, upload-time = "2026-02-19T22:32:29.305Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/d8/7992b18f2008bdc9231d0f10b16df7dda964dbf639e2b8b4c1b4e91b83af/ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea", size = 11303707, upload-time = "2026-02-19T22:32:22.492Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/02/849b46184bcfdd4b64cde61752cc9a146c54759ed036edd11857e9b8443b/ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a", size = 11149151, upload-time = "2026-02-19T22:32:44.234Z" },
+    { url = "https://files.pythonhosted.org/packages/70/04/f5284e388bab60d1d3b99614a5a9aeb03e0f333847e2429bebd2aaa1feec/ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956", size = 11091132, upload-time = "2026-02-19T22:32:24.691Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/ae/88d844a21110e14d92cf73d57363fab59b727ebeabe78009b9ccb23500af/ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4", size = 10504717, upload-time = "2026-02-19T22:32:26.75Z" },
+    { url = "https://files.pythonhosted.org/packages/64/27/867076a6ada7f2b9c8292884ab44d08fd2ba71bd2b5364d4136f3cd537e1/ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de", size = 10263122, upload-time = "2026-02-19T22:32:10.036Z" },
+    { url = "https://files.pythonhosted.org/packages/e7/ef/faf9321d550f8ebf0c6373696e70d1758e20ccdc3951ad7af00c0956be7c/ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c", size = 10735295, upload-time = "2026-02-19T22:32:39.227Z" },
+    { url = "https://files.pythonhosted.org/packages/2f/55/e8089fec62e050ba84d71b70e7834b97709ca9b7aba10c1a0b196e493f97/ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8", size = 11241641, upload-time = "2026-02-19T22:32:34.617Z" },
+    { url = "https://files.pythonhosted.org/packages/23/01/1c30526460f4d23222d0fabd5888868262fd0e2b71a00570ca26483cd993/ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f", size = 10507885, upload-time = "2026-02-19T22:32:15.635Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/10/3d18e3bbdf8fc50bbb4ac3cc45970aa5a9753c5cb51bf9ed9a3cd8b79fa3/ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5", size = 11623725, upload-time = "2026-02-19T22:32:04.947Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/78/097c0798b1dab9f8affe73da9642bb4500e098cb27fd8dc9724816ac747b/ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e", size = 10941649, upload-time = "2026-02-19T22:32:18.108Z" },
 ]
 
 [[package]]

From fb2c1c9c3eab48e861a54e974555d5060c874f02 Mon Sep 17 00:00:00 2001
From: Rana Aurangzaib 
Date: Thu, 26 Feb 2026 16:36:46 +0300
Subject: [PATCH 17/17] Update version to 0.1.1 in pyproject.toml and uv.lock

---
 pyproject.toml | 2 +-
 uv.lock        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 1de2b0f..9ae1b5d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "libtea"
-version = "0.1.0"
+version = "0.1.1"
 description = "Python client library for the Transparency Exchange API (TEA)"
 authors = [{ name = "sbomify", email = "hello@sbomify.com" }]
 requires-python = ">=3.11"
diff --git a/uv.lock b/uv.lock
index 3d65f79..84a4d4b 100644
--- a/uv.lock
+++ b/uv.lock
@@ -262,7 +262,7 @@ wheels = [
 
 [[package]]
 name = "libtea"
-version = "0.1.0"
+version = "0.1.1"
 source = { editable = "." }
 dependencies = [
     { name = "pydantic" },