From 11f09da35a0b8e8a9d4b221724e6321cba967a88 Mon Sep 17 00:00:00 2001 From: "Joseph T. French" Date: Mon, 16 Mar 2026 22:59:59 -0500 Subject: [PATCH 1/5] Add new data models and ledger API endpoints to the client library ## Summary This commit introduces several new data models and ledger API endpoint functions to the `robosystems_client` library, enhancing the API's capabilities for handling account and ledger-related information. ## Key Accomplishments - **New Account Models**: Added `AccountListResponse`, `AccountResponse`, `AccountTreeNode`, and `AccountTreeResponse` to support account-related data structures. - **New Ledger Models**: Introduced `LedgerEntryResponse`, `LedgerLineItemResponse`, `LedgerSummaryResponse`, `LedgerTransactionDetailResponse`, `LedgerTransactionListResponse`, and `LedgerTransactionSummaryResponse` for comprehensive ledger management. - **New Ledger Endpoints**: Added sync and async endpoint functions for the account tree, ledger summary, transaction detail, trial balance, account listing, and transaction listing operations. - **Pagination Support**: Included `PaginationInfo` to facilitate paginated responses in API calls. - **Trial Balance Models**: Added `TrialBalanceResponse` and `TrialBalanceRow` to support trial balance reporting. ## Changes Breakdown - Added six endpoint modules under `robosystems_client/api/ledger/` (`get_ledger_account_tree`, `get_ledger_summary`, `get_ledger_transaction`, `get_ledger_trial_balance`, `list_ledger_accounts`, `list_ledger_transactions`), plus the package `__init__.py`. - Added fourteen model modules under `robosystems_client/models/`. - Updated `models/__init__.py` to include new models in the public API. ## Testing Notes - Ensure that the new models are correctly integrated and accessible within the client library. ## Infrastructure Considerations - No breaking changes introduced; existing functionality remains intact. 
--- robosystems_client/api/ledger/__init__.py | 1 + .../api/ledger/get_ledger_account_tree.py | 160 +++++++++ .../api/ledger/get_ledger_summary.py | 160 +++++++++ .../api/ledger/get_ledger_transaction.py | 174 ++++++++++ .../api/ledger/get_ledger_trial_balance.py | 211 ++++++++++++ .../api/ledger/list_ledger_accounts.py | 236 +++++++++++++ .../api/ledger/list_ledger_transactions.py | 261 +++++++++++++++ robosystems_client/models/__init__.py | 26 ++ .../models/account_list_response.py | 86 +++++ robosystems_client/models/account_response.py | 227 +++++++++++++ .../models/account_tree_node.py | 132 ++++++++ .../models/account_tree_response.py | 83 +++++ .../models/ledger_entry_response.py | 181 ++++++++++ .../models/ledger_line_item_response.py | 155 +++++++++ .../models/ledger_summary_response.py | 200 +++++++++++ .../ledger_transaction_detail_response.py | 315 ++++++++++++++++++ .../ledger_transaction_list_response.py | 92 +++++ .../ledger_transaction_summary_response.py | 243 ++++++++++++++ robosystems_client/models/pagination_info.py | 89 +++++ .../models/trial_balance_response.py | 91 +++++ .../models/trial_balance_row.py | 109 ++++++ 21 files changed, 3232 insertions(+) create mode 100644 robosystems_client/api/ledger/__init__.py create mode 100644 robosystems_client/api/ledger/get_ledger_account_tree.py create mode 100644 robosystems_client/api/ledger/get_ledger_summary.py create mode 100644 robosystems_client/api/ledger/get_ledger_transaction.py create mode 100644 robosystems_client/api/ledger/get_ledger_trial_balance.py create mode 100644 robosystems_client/api/ledger/list_ledger_accounts.py create mode 100644 robosystems_client/api/ledger/list_ledger_transactions.py create mode 100644 robosystems_client/models/account_list_response.py create mode 100644 robosystems_client/models/account_response.py create mode 100644 robosystems_client/models/account_tree_node.py create mode 100644 robosystems_client/models/account_tree_response.py create mode 100644 
robosystems_client/models/ledger_entry_response.py create mode 100644 robosystems_client/models/ledger_line_item_response.py create mode 100644 robosystems_client/models/ledger_summary_response.py create mode 100644 robosystems_client/models/ledger_transaction_detail_response.py create mode 100644 robosystems_client/models/ledger_transaction_list_response.py create mode 100644 robosystems_client/models/ledger_transaction_summary_response.py create mode 100644 robosystems_client/models/pagination_info.py create mode 100644 robosystems_client/models/trial_balance_response.py create mode 100644 robosystems_client/models/trial_balance_row.py diff --git a/robosystems_client/api/ledger/__init__.py b/robosystems_client/api/ledger/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/robosystems_client/api/ledger/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/robosystems_client/api/ledger/get_ledger_account_tree.py b/robosystems_client/api/ledger/get_ledger_account_tree.py new file mode 100644 index 0000000..37ff7b4 --- /dev/null +++ b/robosystems_client/api/ledger/get_ledger_account_tree.py @@ -0,0 +1,160 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.account_tree_response import AccountTreeResponse +from ...models.http_validation_error import HTTPValidationError +from ...types import Response + + +def _get_kwargs( + graph_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/ledger/{graph_id}/accounts/tree".format( + graph_id=quote(str(graph_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AccountTreeResponse | HTTPValidationError | None: + if response.status_code == 200: + response_200 = AccountTreeResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AccountTreeResponse | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Response[AccountTreeResponse | HTTPValidationError]: + """Account Tree + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[AccountTreeResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, +) -> AccountTreeResponse | HTTPValidationError | None: + """Account Tree + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AccountTreeResponse | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Response[AccountTreeResponse | HTTPValidationError]: + """Account Tree + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AccountTreeResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, +) -> AccountTreeResponse | HTTPValidationError | None: + """Account Tree + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + AccountTreeResponse | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + ) + ).parsed diff --git a/robosystems_client/api/ledger/get_ledger_summary.py b/robosystems_client/api/ledger/get_ledger_summary.py new file mode 100644 index 0000000..112f138 --- /dev/null +++ b/robosystems_client/api/ledger/get_ledger_summary.py @@ -0,0 +1,160 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.http_validation_error import HTTPValidationError +from ...models.ledger_summary_response import LedgerSummaryResponse +from ...types import Response + + +def _get_kwargs( + graph_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/ledger/{graph_id}/summary".format( + graph_id=quote(str(graph_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | LedgerSummaryResponse | None: + if response.status_code == 200: + response_200 = LedgerSummaryResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | LedgerSummaryResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Response[HTTPValidationError | LedgerSummaryResponse]: + 
"""Ledger Summary + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HTTPValidationError | LedgerSummaryResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, +) -> HTTPValidationError | LedgerSummaryResponse | None: + """Ledger Summary + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + HTTPValidationError | LedgerSummaryResponse + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Response[HTTPValidationError | LedgerSummaryResponse]: + """Ledger Summary + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[HTTPValidationError | LedgerSummaryResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, +) -> HTTPValidationError | LedgerSummaryResponse | None: + """Ledger Summary + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + HTTPValidationError | LedgerSummaryResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + ) + ).parsed diff --git a/robosystems_client/api/ledger/get_ledger_transaction.py b/robosystems_client/api/ledger/get_ledger_transaction.py new file mode 100644 index 0000000..2fa191a --- /dev/null +++ b/robosystems_client/api/ledger/get_ledger_transaction.py @@ -0,0 +1,174 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.http_validation_error import HTTPValidationError +from ...models.ledger_transaction_detail_response import LedgerTransactionDetailResponse +from ...types import Response + + +def _get_kwargs( + graph_id: str, + transaction_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/ledger/{graph_id}/transactions/{transaction_id}".format( + graph_id=quote(str(graph_id), safe=""), + transaction_id=quote(str(transaction_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | LedgerTransactionDetailResponse | None: + if response.status_code == 200: + response_200 = LedgerTransactionDetailResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | LedgerTransactionDetailResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + transaction_id: str, + *, + client: AuthenticatedClient, +) -> Response[HTTPValidationError | LedgerTransactionDetailResponse]: + """Transaction Detail + + Args: + graph_id (str): + transaction_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[HTTPValidationError | LedgerTransactionDetailResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + transaction_id=transaction_id, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + transaction_id: str, + *, + client: AuthenticatedClient, +) -> HTTPValidationError | LedgerTransactionDetailResponse | None: + """Transaction Detail + + Args: + graph_id (str): + transaction_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + HTTPValidationError | LedgerTransactionDetailResponse + """ + + return sync_detailed( + graph_id=graph_id, + transaction_id=transaction_id, + client=client, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + transaction_id: str, + *, + client: AuthenticatedClient, +) -> Response[HTTPValidationError | LedgerTransactionDetailResponse]: + """Transaction Detail + + Args: + graph_id (str): + transaction_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[HTTPValidationError | LedgerTransactionDetailResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + transaction_id=transaction_id, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + transaction_id: str, + *, + client: AuthenticatedClient, +) -> HTTPValidationError | LedgerTransactionDetailResponse | None: + """Transaction Detail + + Args: + graph_id (str): + transaction_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + HTTPValidationError | LedgerTransactionDetailResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + transaction_id=transaction_id, + client=client, + ) + ).parsed diff --git a/robosystems_client/api/ledger/get_ledger_trial_balance.py b/robosystems_client/api/ledger/get_ledger_trial_balance.py new file mode 100644 index 0000000..83a938c --- /dev/null +++ b/robosystems_client/api/ledger/get_ledger_trial_balance.py @@ -0,0 +1,211 @@ +import datetime +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.http_validation_error import HTTPValidationError +from ...models.trial_balance_response import TrialBalanceResponse +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + graph_id: str, + *, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_start_date: None | str | Unset + if isinstance(start_date, Unset): + json_start_date = UNSET + elif isinstance(start_date, datetime.date): + json_start_date = start_date.isoformat() + else: + json_start_date = start_date + params["start_date"] = json_start_date + + json_end_date: None | str | Unset + if isinstance(end_date, Unset): + json_end_date = UNSET + elif isinstance(end_date, datetime.date): + json_end_date = end_date.isoformat() + else: + json_end_date = end_date + params["end_date"] = json_end_date + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/ledger/{graph_id}/trial-balance".format( + graph_id=quote(str(graph_id), safe=""), + ), + "params": params, + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | TrialBalanceResponse | None: + if response.status_code == 200: + response_200 = TrialBalanceResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | TrialBalanceResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + 
content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, +) -> Response[HTTPValidationError | TrialBalanceResponse]: + """Trial Balance + + Args: + graph_id (str): + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HTTPValidationError | TrialBalanceResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + start_date=start_date, + end_date=end_date, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, +) -> HTTPValidationError | TrialBalanceResponse | None: + """Trial Balance + + Args: + graph_id (str): + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + HTTPValidationError | TrialBalanceResponse + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + start_date=start_date, + end_date=end_date, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, +) -> Response[HTTPValidationError | TrialBalanceResponse]: + """Trial Balance + + Args: + graph_id (str): + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HTTPValidationError | TrialBalanceResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + start_date=start_date, + end_date=end_date, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, +) -> HTTPValidationError | TrialBalanceResponse | None: + """Trial Balance + + Args: + graph_id (str): + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + HTTPValidationError | TrialBalanceResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + start_date=start_date, + end_date=end_date, + ) + ).parsed diff --git a/robosystems_client/api/ledger/list_ledger_accounts.py b/robosystems_client/api/ledger/list_ledger_accounts.py new file mode 100644 index 0000000..0927ad1 --- /dev/null +++ b/robosystems_client/api/ledger/list_ledger_accounts.py @@ -0,0 +1,236 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.account_list_response import AccountListResponse +from ...models.http_validation_error import HTTPValidationError +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + graph_id: str, + *, + classification: None | str | Unset = UNSET, + is_active: bool | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_classification: None | str | Unset + if isinstance(classification, Unset): + json_classification = UNSET + else: + json_classification = classification + params["classification"] = json_classification + + json_is_active: bool | None | Unset + if isinstance(is_active, Unset): + json_is_active = UNSET + else: + json_is_active = is_active + params["is_active"] = json_is_active + + params["limit"] = limit + + params["offset"] = offset + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/ledger/{graph_id}/accounts".format( + graph_id=quote(str(graph_id), safe=""), + ), + "params": params, + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AccountListResponse | HTTPValidationError | None: + if response.status_code == 200: + response_200 = 
AccountListResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AccountListResponse | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + classification: None | str | Unset = UNSET, + is_active: bool | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> Response[AccountListResponse | HTTPValidationError]: + """List Accounts + + Args: + graph_id (str): + classification (None | str | Unset): Filter by classification + is_active (bool | None | Unset): Filter by active status + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[AccountListResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + classification=classification, + is_active=is_active, + limit=limit, + offset=offset, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + classification: None | str | Unset = UNSET, + is_active: bool | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> AccountListResponse | HTTPValidationError | None: + """List Accounts + + Args: + graph_id (str): + classification (None | str | Unset): Filter by classification + is_active (bool | None | Unset): Filter by active status + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AccountListResponse | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + classification=classification, + is_active=is_active, + limit=limit, + offset=offset, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + classification: None | str | Unset = UNSET, + is_active: bool | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> Response[AccountListResponse | HTTPValidationError]: + """List Accounts + + Args: + graph_id (str): + classification (None | str | Unset): Filter by classification + is_active (bool | None | Unset): Filter by active status + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AccountListResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + classification=classification, + is_active=is_active, + limit=limit, + offset=offset, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + classification: None | str | Unset = UNSET, + is_active: bool | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> AccountListResponse | HTTPValidationError | None: + """List Accounts + + Args: + graph_id (str): + classification (None | str | Unset): Filter by classification + is_active (bool | None | Unset): Filter by active status + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AccountListResponse | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + classification=classification, + is_active=is_active, + limit=limit, + offset=offset, + ) + ).parsed diff --git a/robosystems_client/api/ledger/list_ledger_transactions.py b/robosystems_client/api/ledger/list_ledger_transactions.py new file mode 100644 index 0000000..34b68f5 --- /dev/null +++ b/robosystems_client/api/ledger/list_ledger_transactions.py @@ -0,0 +1,261 @@ +import datetime +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.http_validation_error import HTTPValidationError +from ...models.ledger_transaction_list_response import LedgerTransactionListResponse +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + graph_id: str, + *, + type_: None | str | Unset = UNSET, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_type_: None | str | Unset + if isinstance(type_, Unset): + json_type_ = UNSET + else: + json_type_ = type_ + params["type"] = json_type_ + + json_start_date: None | str | Unset + if isinstance(start_date, Unset): + json_start_date = UNSET + elif isinstance(start_date, datetime.date): + json_start_date = start_date.isoformat() + else: + json_start_date = start_date + params["start_date"] = json_start_date + + json_end_date: None | str | Unset + if isinstance(end_date, Unset): + json_end_date = UNSET + elif isinstance(end_date, datetime.date): + json_end_date = end_date.isoformat() + else: + json_end_date = end_date + params["end_date"] = json_end_date + + params["limit"] = limit + + params["offset"] = offset + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/ledger/{graph_id}/transactions".format( + graph_id=quote(str(graph_id), safe=""), + ), + "params": params, + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | LedgerTransactionListResponse | None: + if response.status_code == 200: + response_200 = LedgerTransactionListResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if 
client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | LedgerTransactionListResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + type_: None | str | Unset = UNSET, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> Response[HTTPValidationError | LedgerTransactionListResponse]: + """List Transactions + + Args: + graph_id (str): + type_ (None | str | Unset): Filter by transaction type + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[HTTPValidationError | LedgerTransactionListResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + type_=type_, + start_date=start_date, + end_date=end_date, + limit=limit, + offset=offset, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + type_: None | str | Unset = UNSET, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> HTTPValidationError | LedgerTransactionListResponse | None: + """List Transactions + + Args: + graph_id (str): + type_ (None | str | Unset): Filter by transaction type + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + HTTPValidationError | LedgerTransactionListResponse + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + type_=type_, + start_date=start_date, + end_date=end_date, + limit=limit, + offset=offset, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + type_: None | str | Unset = UNSET, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> Response[HTTPValidationError | LedgerTransactionListResponse]: + """List Transactions + + Args: + graph_id (str): + type_ (None | str | Unset): Filter by transaction type + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[HTTPValidationError | LedgerTransactionListResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + type_=type_, + start_date=start_date, + end_date=end_date, + limit=limit, + offset=offset, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + type_: None | str | Unset = UNSET, + start_date: datetime.date | None | Unset = UNSET, + end_date: datetime.date | None | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> HTTPValidationError | LedgerTransactionListResponse | None: + """List Transactions + + Args: + graph_id (str): + type_ (None | str | Unset): Filter by transaction type + start_date (datetime.date | None | Unset): Start date (inclusive) + end_date (datetime.date | None | Unset): End date (inclusive) + limit (int | Unset): Default: 100. + offset (int | Unset): Default: 0. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + HTTPValidationError | LedgerTransactionListResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + type_=type_, + start_date=start_date, + end_date=end_date, + limit=limit, + offset=offset, + ) + ).parsed diff --git a/robosystems_client/models/__init__.py b/robosystems_client/models/__init__.py index 306df7d..e55770e 100644 --- a/robosystems_client/models/__init__.py +++ b/robosystems_client/models/__init__.py @@ -1,6 +1,10 @@ """Contains all the data models used in inputs/outputs""" from .account_info import AccountInfo +from .account_list_response import AccountListResponse +from .account_response import AccountResponse +from .account_tree_node import AccountTreeNode +from .account_tree_response import AccountTreeResponse from .agent_list_response import AgentListResponse from .agent_list_response_agents import AgentListResponseAgents from .agent_list_response_agents_additional_property import ( @@ -149,6 +153,12 @@ from .invoice import Invoice from .invoice_line_item import InvoiceLineItem from .invoices_response import InvoicesResponse +from .ledger_entry_response import LedgerEntryResponse +from .ledger_line_item_response import LedgerLineItemResponse +from .ledger_summary_response import LedgerSummaryResponse +from .ledger_transaction_detail_response import LedgerTransactionDetailResponse +from .ledger_transaction_list_response import LedgerTransactionListResponse +from .ledger_transaction_summary_response import LedgerTransactionSummaryResponse from .list_connections_provider_type_0 import ListConnectionsProviderType0 from .list_org_graphs_response_200_item import ListOrgGraphsResponse200Item from .list_subgraphs_response import ListSubgraphsResponse @@ -192,6 +202,7 @@ from .org_usage_response_daily_trend_item import OrgUsageResponseDailyTrendItem from .org_usage_response_graph_details_item import OrgUsageResponseGraphDetailsItem from .org_usage_summary import OrgUsageSummary +from .pagination_info import 
PaginationInfo from .password_check_request import PasswordCheckRequest from .password_check_response import PasswordCheckResponse from .password_check_response_character_types import PasswordCheckResponseCharacterTypes @@ -262,6 +273,8 @@ from .tier_capacity import TierCapacity from .token_pricing import TokenPricing from .transaction_summary_response import TransactionSummaryResponse +from .trial_balance_response import TrialBalanceResponse +from .trial_balance_row import TrialBalanceRow from .upcoming_invoice import UpcomingInvoice from .update_api_key_request import UpdateAPIKeyRequest from .update_file_response_updatefile import UpdateFileResponseUpdatefile @@ -281,6 +294,10 @@ __all__ = ( "AccountInfo", + "AccountListResponse", + "AccountResponse", + "AccountTreeNode", + "AccountTreeResponse", "AgentListResponse", "AgentListResponseAgents", "AgentListResponseAgentsAdditionalProperty", @@ -403,6 +420,12 @@ "Invoice", "InvoiceLineItem", "InvoicesResponse", + "LedgerEntryResponse", + "LedgerLineItemResponse", + "LedgerSummaryResponse", + "LedgerTransactionDetailResponse", + "LedgerTransactionListResponse", + "LedgerTransactionSummaryResponse", "ListConnectionsProviderType0", "ListOrgGraphsResponse200Item", "ListSubgraphsResponse", @@ -442,6 +465,7 @@ "OrgUsageResponseDailyTrendItem", "OrgUsageResponseGraphDetailsItem", "OrgUsageSummary", + "PaginationInfo", "PasswordCheckRequest", "PasswordCheckResponse", "PasswordCheckResponseCharacterTypes", @@ -500,6 +524,8 @@ "TierCapacity", "TokenPricing", "TransactionSummaryResponse", + "TrialBalanceResponse", + "TrialBalanceRow", "UpcomingInvoice", "UpdateAPIKeyRequest", "UpdateFileResponseUpdatefile", diff --git a/robosystems_client/models/account_list_response.py b/robosystems_client/models/account_list_response.py new file mode 100644 index 0000000..3fd4774 --- /dev/null +++ b/robosystems_client/models/account_list_response.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from collections.abc import Mapping 
+from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.account_response import AccountResponse + from ..models.pagination_info import PaginationInfo + + +T = TypeVar("T", bound="AccountListResponse") + + +@_attrs_define +class AccountListResponse: + """ + Attributes: + accounts (list[AccountResponse]): + pagination (PaginationInfo): Pagination information for list responses. Example: {'has_more': True, 'limit': 20, + 'offset': 0, 'total': 100}. + """ + + accounts: list[AccountResponse] + pagination: PaginationInfo + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + accounts = [] + for accounts_item_data in self.accounts: + accounts_item = accounts_item_data.to_dict() + accounts.append(accounts_item) + + pagination = self.pagination.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "accounts": accounts, + "pagination": pagination, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.account_response import AccountResponse + from ..models.pagination_info import PaginationInfo + + d = dict(src_dict) + accounts = [] + _accounts = d.pop("accounts") + for accounts_item_data in _accounts: + accounts_item = AccountResponse.from_dict(accounts_item_data) + + accounts.append(accounts_item) + + pagination = PaginationInfo.from_dict(d.pop("pagination")) + + account_list_response = cls( + accounts=accounts, + pagination=pagination, + ) + + account_list_response.additional_properties = d + return account_list_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) 
-> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/account_response.py b/robosystems_client/models/account_response.py new file mode 100644 index 0000000..accc8e2 --- /dev/null +++ b/robosystems_client/models/account_response.py @@ -0,0 +1,227 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="AccountResponse") + + +@_attrs_define +class AccountResponse: + """ + Attributes: + id (str): + code (str): + name (str): + classification (str): + balance_type (str): + depth (int): + currency (str): + is_active (bool): + is_placeholder (bool): + description (None | str | Unset): + sub_classification (None | str | Unset): + parent_id (None | str | Unset): + external_id (None | str | Unset): + external_source (None | str | Unset): + """ + + id: str + code: str + name: str + classification: str + balance_type: str + depth: int + currency: str + is_active: bool + is_placeholder: bool + description: None | str | Unset = UNSET + sub_classification: None | str | Unset = UNSET + parent_id: None | str | Unset = UNSET + external_id: None | str | Unset = UNSET + external_source: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + code = self.code + + name = self.name + + classification = self.classification + + balance_type = self.balance_type + + depth = self.depth + + currency = self.currency + + is_active = self.is_active + + is_placeholder = self.is_placeholder + + description: None | str | Unset + if isinstance(self.description, Unset): + 
description = UNSET + else: + description = self.description + + sub_classification: None | str | Unset + if isinstance(self.sub_classification, Unset): + sub_classification = UNSET + else: + sub_classification = self.sub_classification + + parent_id: None | str | Unset + if isinstance(self.parent_id, Unset): + parent_id = UNSET + else: + parent_id = self.parent_id + + external_id: None | str | Unset + if isinstance(self.external_id, Unset): + external_id = UNSET + else: + external_id = self.external_id + + external_source: None | str | Unset + if isinstance(self.external_source, Unset): + external_source = UNSET + else: + external_source = self.external_source + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "code": code, + "name": name, + "classification": classification, + "balance_type": balance_type, + "depth": depth, + "currency": currency, + "is_active": is_active, + "is_placeholder": is_placeholder, + } + ) + if description is not UNSET: + field_dict["description"] = description + if sub_classification is not UNSET: + field_dict["sub_classification"] = sub_classification + if parent_id is not UNSET: + field_dict["parent_id"] = parent_id + if external_id is not UNSET: + field_dict["external_id"] = external_id + if external_source is not UNSET: + field_dict["external_source"] = external_source + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + id = d.pop("id") + + code = d.pop("code") + + name = d.pop("name") + + classification = d.pop("classification") + + balance_type = d.pop("balance_type") + + depth = d.pop("depth") + + currency = d.pop("currency") + + is_active = d.pop("is_active") + + is_placeholder = d.pop("is_placeholder") + + def _parse_description(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + 
description = _parse_description(d.pop("description", UNSET)) + + def _parse_sub_classification(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + sub_classification = _parse_sub_classification(d.pop("sub_classification", UNSET)) + + def _parse_parent_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + parent_id = _parse_parent_id(d.pop("parent_id", UNSET)) + + def _parse_external_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + external_id = _parse_external_id(d.pop("external_id", UNSET)) + + def _parse_external_source(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + external_source = _parse_external_source(d.pop("external_source", UNSET)) + + account_response = cls( + id=id, + code=code, + name=name, + classification=classification, + balance_type=balance_type, + depth=depth, + currency=currency, + is_active=is_active, + is_placeholder=is_placeholder, + description=description, + sub_classification=sub_classification, + parent_id=parent_id, + external_id=external_id, + external_source=external_source, + ) + + account_response.additional_properties = d + return account_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff 
--git a/robosystems_client/models/account_tree_node.py b/robosystems_client/models/account_tree_node.py new file mode 100644 index 0000000..7a2062e --- /dev/null +++ b/robosystems_client/models/account_tree_node.py @@ -0,0 +1,132 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="AccountTreeNode") + + +@_attrs_define +class AccountTreeNode: + """ + Attributes: + id (str): + code (str): + name (str): + classification (str): + balance_type (str): + depth (int): + is_active (bool): + children (list[AccountTreeNode] | Unset): + """ + + id: str + code: str + name: str + classification: str + balance_type: str + depth: int + is_active: bool + children: list[AccountTreeNode] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + code = self.code + + name = self.name + + classification = self.classification + + balance_type = self.balance_type + + depth = self.depth + + is_active = self.is_active + + children: list[dict[str, Any]] | Unset = UNSET + if not isinstance(self.children, Unset): + children = [] + for children_item_data in self.children: + children_item = children_item_data.to_dict() + children.append(children_item) + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "code": code, + "name": name, + "classification": classification, + "balance_type": balance_type, + "depth": depth, + "is_active": is_active, + } + ) + if children is not UNSET: + field_dict["children"] = children + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + id = d.pop("id") + + code = d.pop("code") + + name = d.pop("name") + + classification = 
d.pop("classification") + + balance_type = d.pop("balance_type") + + depth = d.pop("depth") + + is_active = d.pop("is_active") + + _children = d.pop("children", UNSET) + children: list[AccountTreeNode] | Unset = UNSET + if _children is not UNSET: + children = [] + for children_item_data in _children: + children_item = AccountTreeNode.from_dict(children_item_data) + + children.append(children_item) + + account_tree_node = cls( + id=id, + code=code, + name=name, + classification=classification, + balance_type=balance_type, + depth=depth, + is_active=is_active, + children=children, + ) + + account_tree_node.additional_properties = d + return account_tree_node + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/account_tree_response.py b/robosystems_client/models/account_tree_response.py new file mode 100644 index 0000000..4149f00 --- /dev/null +++ b/robosystems_client/models/account_tree_response.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.account_tree_node import AccountTreeNode + + +T = TypeVar("T", bound="AccountTreeResponse") + + +@_attrs_define +class AccountTreeResponse: + """ + Attributes: + roots (list[AccountTreeNode]): + total_accounts (int): + """ + + roots: list[AccountTreeNode] + total_accounts: int + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def 
to_dict(self) -> dict[str, Any]: + roots = [] + for roots_item_data in self.roots: + roots_item = roots_item_data.to_dict() + roots.append(roots_item) + + total_accounts = self.total_accounts + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "roots": roots, + "total_accounts": total_accounts, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.account_tree_node import AccountTreeNode + + d = dict(src_dict) + roots = [] + _roots = d.pop("roots") + for roots_item_data in _roots: + roots_item = AccountTreeNode.from_dict(roots_item_data) + + roots.append(roots_item) + + total_accounts = d.pop("total_accounts") + + account_tree_response = cls( + roots=roots, + total_accounts=total_accounts, + ) + + account_tree_response.additional_properties = d + return account_tree_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/ledger_entry_response.py b/robosystems_client/models/ledger_entry_response.py new file mode 100644 index 0000000..e189a7d --- /dev/null +++ b/robosystems_client/models/ledger_entry_response.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.ledger_line_item_response 
import LedgerLineItemResponse + + +T = TypeVar("T", bound="LedgerEntryResponse") + + +@_attrs_define +class LedgerEntryResponse: + """ + Attributes: + id (str): + type_ (str): + posting_date (datetime.date): + status (str): + line_items (list[LedgerLineItemResponse]): + number (None | str | Unset): + memo (None | str | Unset): + posted_at (datetime.datetime | None | Unset): + """ + + id: str + type_: str + posting_date: datetime.date + status: str + line_items: list[LedgerLineItemResponse] + number: None | str | Unset = UNSET + memo: None | str | Unset = UNSET + posted_at: datetime.datetime | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + type_ = self.type_ + + posting_date = self.posting_date.isoformat() + + status = self.status + + line_items = [] + for line_items_item_data in self.line_items: + line_items_item = line_items_item_data.to_dict() + line_items.append(line_items_item) + + number: None | str | Unset + if isinstance(self.number, Unset): + number = UNSET + else: + number = self.number + + memo: None | str | Unset + if isinstance(self.memo, Unset): + memo = UNSET + else: + memo = self.memo + + posted_at: None | str | Unset + if isinstance(self.posted_at, Unset): + posted_at = UNSET + elif isinstance(self.posted_at, datetime.datetime): + posted_at = self.posted_at.isoformat() + else: + posted_at = self.posted_at + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "type": type_, + "posting_date": posting_date, + "status": status, + "line_items": line_items, + } + ) + if number is not UNSET: + field_dict["number"] = number + if memo is not UNSET: + field_dict["memo"] = memo + if posted_at is not UNSET: + field_dict["posted_at"] = posted_at + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from 
..models.ledger_line_item_response import LedgerLineItemResponse + + d = dict(src_dict) + id = d.pop("id") + + type_ = d.pop("type") + + posting_date = isoparse(d.pop("posting_date")).date() + + status = d.pop("status") + + line_items = [] + _line_items = d.pop("line_items") + for line_items_item_data in _line_items: + line_items_item = LedgerLineItemResponse.from_dict(line_items_item_data) + + line_items.append(line_items_item) + + def _parse_number(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + number = _parse_number(d.pop("number", UNSET)) + + def _parse_memo(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + memo = _parse_memo(d.pop("memo", UNSET)) + + def _parse_posted_at(data: object) -> datetime.datetime | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + posted_at_type_0 = isoparse(data) + + return posted_at_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.datetime | None | Unset, data) + + posted_at = _parse_posted_at(d.pop("posted_at", UNSET)) + + ledger_entry_response = cls( + id=id, + type_=type_, + posting_date=posting_date, + status=status, + line_items=line_items, + number=number, + memo=memo, + posted_at=posted_at, + ) + + ledger_entry_response.additional_properties = d + return ledger_entry_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def 
__contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/ledger_line_item_response.py b/robosystems_client/models/ledger_line_item_response.py new file mode 100644 index 0000000..d997ee5 --- /dev/null +++ b/robosystems_client/models/ledger_line_item_response.py @@ -0,0 +1,155 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="LedgerLineItemResponse") + + +@_attrs_define +class LedgerLineItemResponse: + """ + Attributes: + id (str): + account_id (str): + debit_amount (float): + credit_amount (float): + line_order (int): + account_name (None | str | Unset): + account_code (None | str | Unset): + description (None | str | Unset): + """ + + id: str + account_id: str + debit_amount: float + credit_amount: float + line_order: int + account_name: None | str | Unset = UNSET + account_code: None | str | Unset = UNSET + description: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + account_id = self.account_id + + debit_amount = self.debit_amount + + credit_amount = self.credit_amount + + line_order = self.line_order + + account_name: None | str | Unset + if isinstance(self.account_name, Unset): + account_name = UNSET + else: + account_name = self.account_name + + account_code: None | str | Unset + if isinstance(self.account_code, Unset): + account_code = UNSET + else: + account_code = self.account_code + + description: None | str | Unset + if isinstance(self.description, Unset): + description = UNSET + else: + description = self.description + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "account_id": 
account_id, + "debit_amount": debit_amount, + "credit_amount": credit_amount, + "line_order": line_order, + } + ) + if account_name is not UNSET: + field_dict["account_name"] = account_name + if account_code is not UNSET: + field_dict["account_code"] = account_code + if description is not UNSET: + field_dict["description"] = description + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + id = d.pop("id") + + account_id = d.pop("account_id") + + debit_amount = d.pop("debit_amount") + + credit_amount = d.pop("credit_amount") + + line_order = d.pop("line_order") + + def _parse_account_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + account_name = _parse_account_name(d.pop("account_name", UNSET)) + + def _parse_account_code(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + account_code = _parse_account_code(d.pop("account_code", UNSET)) + + def _parse_description(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + description = _parse_description(d.pop("description", UNSET)) + + ledger_line_item_response = cls( + id=id, + account_id=account_id, + debit_amount=debit_amount, + credit_amount=credit_amount, + line_order=line_order, + account_name=account_name, + account_code=account_code, + description=description, + ) + + ledger_line_item_response.additional_properties = d + return ledger_line_item_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + 
self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/ledger_summary_response.py b/robosystems_client/models/ledger_summary_response.py new file mode 100644 index 0000000..8ba2a5a --- /dev/null +++ b/robosystems_client/models/ledger_summary_response.py @@ -0,0 +1,200 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="LedgerSummaryResponse") + + +@_attrs_define +class LedgerSummaryResponse: + """ + Attributes: + graph_id (str): + account_count (int): + transaction_count (int): + entry_count (int): + line_item_count (int): + earliest_transaction_date (datetime.date | None | Unset): + latest_transaction_date (datetime.date | None | Unset): + connection_count (int | Unset): Default: 0. 
+ last_sync_at (datetime.datetime | None | Unset): + """ + + graph_id: str + account_count: int + transaction_count: int + entry_count: int + line_item_count: int + earliest_transaction_date: datetime.date | None | Unset = UNSET + latest_transaction_date: datetime.date | None | Unset = UNSET + connection_count: int | Unset = 0 + last_sync_at: datetime.datetime | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + graph_id = self.graph_id + + account_count = self.account_count + + transaction_count = self.transaction_count + + entry_count = self.entry_count + + line_item_count = self.line_item_count + + earliest_transaction_date: None | str | Unset + if isinstance(self.earliest_transaction_date, Unset): + earliest_transaction_date = UNSET + elif isinstance(self.earliest_transaction_date, datetime.date): + earliest_transaction_date = self.earliest_transaction_date.isoformat() + else: + earliest_transaction_date = self.earliest_transaction_date + + latest_transaction_date: None | str | Unset + if isinstance(self.latest_transaction_date, Unset): + latest_transaction_date = UNSET + elif isinstance(self.latest_transaction_date, datetime.date): + latest_transaction_date = self.latest_transaction_date.isoformat() + else: + latest_transaction_date = self.latest_transaction_date + + connection_count = self.connection_count + + last_sync_at: None | str | Unset + if isinstance(self.last_sync_at, Unset): + last_sync_at = UNSET + elif isinstance(self.last_sync_at, datetime.datetime): + last_sync_at = self.last_sync_at.isoformat() + else: + last_sync_at = self.last_sync_at + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "graph_id": graph_id, + "account_count": account_count, + "transaction_count": transaction_count, + "entry_count": entry_count, + "line_item_count": line_item_count, + } + ) + if earliest_transaction_date is 
not UNSET: + field_dict["earliest_transaction_date"] = earliest_transaction_date + if latest_transaction_date is not UNSET: + field_dict["latest_transaction_date"] = latest_transaction_date + if connection_count is not UNSET: + field_dict["connection_count"] = connection_count + if last_sync_at is not UNSET: + field_dict["last_sync_at"] = last_sync_at + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + graph_id = d.pop("graph_id") + + account_count = d.pop("account_count") + + transaction_count = d.pop("transaction_count") + + entry_count = d.pop("entry_count") + + line_item_count = d.pop("line_item_count") + + def _parse_earliest_transaction_date(data: object) -> datetime.date | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + earliest_transaction_date_type_0 = isoparse(data).date() + + return earliest_transaction_date_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.date | None | Unset, data) + + earliest_transaction_date = _parse_earliest_transaction_date( + d.pop("earliest_transaction_date", UNSET) + ) + + def _parse_latest_transaction_date(data: object) -> datetime.date | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + latest_transaction_date_type_0 = isoparse(data).date() + + return latest_transaction_date_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.date | None | Unset, data) + + latest_transaction_date = _parse_latest_transaction_date( + d.pop("latest_transaction_date", UNSET) + ) + + connection_count = d.pop("connection_count", UNSET) + + def _parse_last_sync_at(data: object) -> datetime.datetime | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + 
return data + try: + if not isinstance(data, str): + raise TypeError() + last_sync_at_type_0 = isoparse(data) + + return last_sync_at_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.datetime | None | Unset, data) + + last_sync_at = _parse_last_sync_at(d.pop("last_sync_at", UNSET)) + + ledger_summary_response = cls( + graph_id=graph_id, + account_count=account_count, + transaction_count=transaction_count, + entry_count=entry_count, + line_item_count=line_item_count, + earliest_transaction_date=earliest_transaction_date, + latest_transaction_date=latest_transaction_date, + connection_count=connection_count, + last_sync_at=last_sync_at, + ) + + ledger_summary_response.additional_properties = d + return ledger_summary_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/ledger_transaction_detail_response.py b/robosystems_client/models/ledger_transaction_detail_response.py new file mode 100644 index 0000000..fef8145 --- /dev/null +++ b/robosystems_client/models/ledger_transaction_detail_response.py @@ -0,0 +1,315 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.ledger_entry_response import LedgerEntryResponse + + +T = TypeVar("T", bound="LedgerTransactionDetailResponse") + + 
+@_attrs_define +class LedgerTransactionDetailResponse: + """ + Attributes: + id (str): + type_ (str): + amount (float): + currency (str): + date (datetime.date): + source (str): + status (str): + entries (list[LedgerEntryResponse]): + number (None | str | Unset): + category (None | str | Unset): + due_date (datetime.date | None | Unset): + merchant_name (None | str | Unset): + reference_number (None | str | Unset): + description (None | str | Unset): + source_id (None | str | Unset): + posted_at (datetime.datetime | None | Unset): + """ + + id: str + type_: str + amount: float + currency: str + date: datetime.date + source: str + status: str + entries: list[LedgerEntryResponse] + number: None | str | Unset = UNSET + category: None | str | Unset = UNSET + due_date: datetime.date | None | Unset = UNSET + merchant_name: None | str | Unset = UNSET + reference_number: None | str | Unset = UNSET + description: None | str | Unset = UNSET + source_id: None | str | Unset = UNSET + posted_at: datetime.datetime | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + type_ = self.type_ + + amount = self.amount + + currency = self.currency + + date = self.date.isoformat() + + source = self.source + + status = self.status + + entries = [] + for entries_item_data in self.entries: + entries_item = entries_item_data.to_dict() + entries.append(entries_item) + + number: None | str | Unset + if isinstance(self.number, Unset): + number = UNSET + else: + number = self.number + + category: None | str | Unset + if isinstance(self.category, Unset): + category = UNSET + else: + category = self.category + + due_date: None | str | Unset + if isinstance(self.due_date, Unset): + due_date = UNSET + elif isinstance(self.due_date, datetime.date): + due_date = self.due_date.isoformat() + else: + due_date = self.due_date + + merchant_name: None | str | Unset + if 
isinstance(self.merchant_name, Unset): + merchant_name = UNSET + else: + merchant_name = self.merchant_name + + reference_number: None | str | Unset + if isinstance(self.reference_number, Unset): + reference_number = UNSET + else: + reference_number = self.reference_number + + description: None | str | Unset + if isinstance(self.description, Unset): + description = UNSET + else: + description = self.description + + source_id: None | str | Unset + if isinstance(self.source_id, Unset): + source_id = UNSET + else: + source_id = self.source_id + + posted_at: None | str | Unset + if isinstance(self.posted_at, Unset): + posted_at = UNSET + elif isinstance(self.posted_at, datetime.datetime): + posted_at = self.posted_at.isoformat() + else: + posted_at = self.posted_at + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "type": type_, + "amount": amount, + "currency": currency, + "date": date, + "source": source, + "status": status, + "entries": entries, + } + ) + if number is not UNSET: + field_dict["number"] = number + if category is not UNSET: + field_dict["category"] = category + if due_date is not UNSET: + field_dict["due_date"] = due_date + if merchant_name is not UNSET: + field_dict["merchant_name"] = merchant_name + if reference_number is not UNSET: + field_dict["reference_number"] = reference_number + if description is not UNSET: + field_dict["description"] = description + if source_id is not UNSET: + field_dict["source_id"] = source_id + if posted_at is not UNSET: + field_dict["posted_at"] = posted_at + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.ledger_entry_response import LedgerEntryResponse + + d = dict(src_dict) + id = d.pop("id") + + type_ = d.pop("type") + + amount = d.pop("amount") + + currency = d.pop("currency") + + date = isoparse(d.pop("date")).date() + + source = d.pop("source") + + status = d.pop("status") 
+ + entries = [] + _entries = d.pop("entries") + for entries_item_data in _entries: + entries_item = LedgerEntryResponse.from_dict(entries_item_data) + + entries.append(entries_item) + + def _parse_number(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + number = _parse_number(d.pop("number", UNSET)) + + def _parse_category(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + category = _parse_category(d.pop("category", UNSET)) + + def _parse_due_date(data: object) -> datetime.date | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + due_date_type_0 = isoparse(data).date() + + return due_date_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.date | None | Unset, data) + + due_date = _parse_due_date(d.pop("due_date", UNSET)) + + def _parse_merchant_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + merchant_name = _parse_merchant_name(d.pop("merchant_name", UNSET)) + + def _parse_reference_number(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + reference_number = _parse_reference_number(d.pop("reference_number", UNSET)) + + def _parse_description(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + description = _parse_description(d.pop("description", UNSET)) + + def _parse_source_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return 
data + return cast(None | str | Unset, data) + + source_id = _parse_source_id(d.pop("source_id", UNSET)) + + def _parse_posted_at(data: object) -> datetime.datetime | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + posted_at_type_0 = isoparse(data) + + return posted_at_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.datetime | None | Unset, data) + + posted_at = _parse_posted_at(d.pop("posted_at", UNSET)) + + ledger_transaction_detail_response = cls( + id=id, + type_=type_, + amount=amount, + currency=currency, + date=date, + source=source, + status=status, + entries=entries, + number=number, + category=category, + due_date=due_date, + merchant_name=merchant_name, + reference_number=reference_number, + description=description, + source_id=source_id, + posted_at=posted_at, + ) + + ledger_transaction_detail_response.additional_properties = d + return ledger_transaction_detail_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/ledger_transaction_list_response.py b/robosystems_client/models/ledger_transaction_list_response.py new file mode 100644 index 0000000..e2f76d6 --- /dev/null +++ b/robosystems_client/models/ledger_transaction_list_response.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as 
_attrs_field + +if TYPE_CHECKING: + from ..models.ledger_transaction_summary_response import ( + LedgerTransactionSummaryResponse, + ) + from ..models.pagination_info import PaginationInfo + + +T = TypeVar("T", bound="LedgerTransactionListResponse") + + +@_attrs_define +class LedgerTransactionListResponse: + """ + Attributes: + transactions (list[LedgerTransactionSummaryResponse]): + pagination (PaginationInfo): Pagination information for list responses. Example: {'has_more': True, 'limit': 20, + 'offset': 0, 'total': 100}. + """ + + transactions: list[LedgerTransactionSummaryResponse] + pagination: PaginationInfo + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + transactions = [] + for transactions_item_data in self.transactions: + transactions_item = transactions_item_data.to_dict() + transactions.append(transactions_item) + + pagination = self.pagination.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "transactions": transactions, + "pagination": pagination, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.ledger_transaction_summary_response import ( + LedgerTransactionSummaryResponse, + ) + from ..models.pagination_info import PaginationInfo + + d = dict(src_dict) + transactions = [] + _transactions = d.pop("transactions") + for transactions_item_data in _transactions: + transactions_item = LedgerTransactionSummaryResponse.from_dict( + transactions_item_data + ) + + transactions.append(transactions_item) + + pagination = PaginationInfo.from_dict(d.pop("pagination")) + + ledger_transaction_list_response = cls( + transactions=transactions, + pagination=pagination, + ) + + ledger_transaction_list_response.additional_properties = d + return ledger_transaction_list_response + + @property + def additional_keys(self) -> list[str]: + return 
list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/ledger_transaction_summary_response.py b/robosystems_client/models/ledger_transaction_summary_response.py new file mode 100644 index 0000000..e0ea30c --- /dev/null +++ b/robosystems_client/models/ledger_transaction_summary_response.py @@ -0,0 +1,243 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="LedgerTransactionSummaryResponse") + + +@_attrs_define +class LedgerTransactionSummaryResponse: + """ + Attributes: + id (str): + type_ (str): + amount (float): + currency (str): + date (datetime.date): + source (str): + status (str): + number (None | str | Unset): + category (None | str | Unset): + due_date (datetime.date | None | Unset): + merchant_name (None | str | Unset): + reference_number (None | str | Unset): + description (None | str | Unset): + """ + + id: str + type_: str + amount: float + currency: str + date: datetime.date + source: str + status: str + number: None | str | Unset = UNSET + category: None | str | Unset = UNSET + due_date: datetime.date | None | Unset = UNSET + merchant_name: None | str | Unset = UNSET + reference_number: None | str | Unset = UNSET + description: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + type_ = self.type_ + + 
amount = self.amount + + currency = self.currency + + date = self.date.isoformat() + + source = self.source + + status = self.status + + number: None | str | Unset + if isinstance(self.number, Unset): + number = UNSET + else: + number = self.number + + category: None | str | Unset + if isinstance(self.category, Unset): + category = UNSET + else: + category = self.category + + due_date: None | str | Unset + if isinstance(self.due_date, Unset): + due_date = UNSET + elif isinstance(self.due_date, datetime.date): + due_date = self.due_date.isoformat() + else: + due_date = self.due_date + + merchant_name: None | str | Unset + if isinstance(self.merchant_name, Unset): + merchant_name = UNSET + else: + merchant_name = self.merchant_name + + reference_number: None | str | Unset + if isinstance(self.reference_number, Unset): + reference_number = UNSET + else: + reference_number = self.reference_number + + description: None | str | Unset + if isinstance(self.description, Unset): + description = UNSET + else: + description = self.description + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "type": type_, + "amount": amount, + "currency": currency, + "date": date, + "source": source, + "status": status, + } + ) + if number is not UNSET: + field_dict["number"] = number + if category is not UNSET: + field_dict["category"] = category + if due_date is not UNSET: + field_dict["due_date"] = due_date + if merchant_name is not UNSET: + field_dict["merchant_name"] = merchant_name + if reference_number is not UNSET: + field_dict["reference_number"] = reference_number + if description is not UNSET: + field_dict["description"] = description + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + id = d.pop("id") + + type_ = d.pop("type") + + amount = d.pop("amount") + + currency = d.pop("currency") + + date = isoparse(d.pop("date")).date() + + 
source = d.pop("source") + + status = d.pop("status") + + def _parse_number(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + number = _parse_number(d.pop("number", UNSET)) + + def _parse_category(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + category = _parse_category(d.pop("category", UNSET)) + + def _parse_due_date(data: object) -> datetime.date | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + due_date_type_0 = isoparse(data).date() + + return due_date_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.date | None | Unset, data) + + due_date = _parse_due_date(d.pop("due_date", UNSET)) + + def _parse_merchant_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + merchant_name = _parse_merchant_name(d.pop("merchant_name", UNSET)) + + def _parse_reference_number(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + reference_number = _parse_reference_number(d.pop("reference_number", UNSET)) + + def _parse_description(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + description = _parse_description(d.pop("description", UNSET)) + + ledger_transaction_summary_response = cls( + id=id, + type_=type_, + amount=amount, + currency=currency, + date=date, + source=source, + status=status, + number=number, + category=category, + due_date=due_date, + merchant_name=merchant_name, + 
reference_number=reference_number, + description=description, + ) + + ledger_transaction_summary_response.additional_properties = d + return ledger_transaction_summary_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/pagination_info.py b/robosystems_client/models/pagination_info.py new file mode 100644 index 0000000..c57de71 --- /dev/null +++ b/robosystems_client/models/pagination_info.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="PaginationInfo") + + +@_attrs_define +class PaginationInfo: + """Pagination information for list responses. 
+ + Example: + {'has_more': True, 'limit': 20, 'offset': 0, 'total': 100} + + Attributes: + total (int): Total number of items available + limit (int): Maximum number of items returned in this response + offset (int): Number of items skipped + has_more (bool): Whether more items are available + """ + + total: int + limit: int + offset: int + has_more: bool + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + total = self.total + + limit = self.limit + + offset = self.offset + + has_more = self.has_more + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "total": total, + "limit": limit, + "offset": offset, + "has_more": has_more, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + total = d.pop("total") + + limit = d.pop("limit") + + offset = d.pop("offset") + + has_more = d.pop("has_more") + + pagination_info = cls( + total=total, + limit=limit, + offset=offset, + has_more=has_more, + ) + + pagination_info.additional_properties = d + return pagination_info + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/trial_balance_response.py b/robosystems_client/models/trial_balance_response.py new file mode 100644 index 0000000..2ec17d5 --- /dev/null +++ b/robosystems_client/models/trial_balance_response.py @@ -0,0 +1,91 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import 
TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.trial_balance_row import TrialBalanceRow + + +T = TypeVar("T", bound="TrialBalanceResponse") + + +@_attrs_define +class TrialBalanceResponse: + """ + Attributes: + rows (list[TrialBalanceRow]): + total_debits (float): + total_credits (float): + """ + + rows: list[TrialBalanceRow] + total_debits: float + total_credits: float + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + rows = [] + for rows_item_data in self.rows: + rows_item = rows_item_data.to_dict() + rows.append(rows_item) + + total_debits = self.total_debits + + total_credits = self.total_credits + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "rows": rows, + "total_debits": total_debits, + "total_credits": total_credits, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.trial_balance_row import TrialBalanceRow + + d = dict(src_dict) + rows = [] + _rows = d.pop("rows") + for rows_item_data in _rows: + rows_item = TrialBalanceRow.from_dict(rows_item_data) + + rows.append(rows_item) + + total_debits = d.pop("total_debits") + + total_credits = d.pop("total_credits") + + trial_balance_response = cls( + rows=rows, + total_debits=total_debits, + total_credits=total_credits, + ) + + trial_balance_response.additional_properties = d + return trial_balance_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, 
key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/trial_balance_row.py b/robosystems_client/models/trial_balance_row.py new file mode 100644 index 0000000..d0f056d --- /dev/null +++ b/robosystems_client/models/trial_balance_row.py @@ -0,0 +1,109 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="TrialBalanceRow") + + +@_attrs_define +class TrialBalanceRow: + """ + Attributes: + account_id (str): + account_code (str): + account_name (str): + classification (str): + total_debits (float): + total_credits (float): + net_balance (float): + """ + + account_id: str + account_code: str + account_name: str + classification: str + total_debits: float + total_credits: float + net_balance: float + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + account_id = self.account_id + + account_code = self.account_code + + account_name = self.account_name + + classification = self.classification + + total_debits = self.total_debits + + total_credits = self.total_credits + + net_balance = self.net_balance + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "account_id": account_id, + "account_code": account_code, + "account_name": account_name, + "classification": classification, + "total_debits": total_debits, + "total_credits": total_credits, + "net_balance": net_balance, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + account_id = d.pop("account_id") + + account_code = d.pop("account_code") + + account_name = d.pop("account_name") + + classification = d.pop("classification") + + total_debits = d.pop("total_debits") + + total_credits = 
d.pop("total_credits") + + net_balance = d.pop("net_balance") + + trial_balance_row = cls( + account_id=account_id, + account_code=account_code, + account_name=account_name, + classification=classification, + total_debits=total_debits, + total_credits=total_credits, + net_balance=net_balance, + ) + + trial_balance_row.additional_properties = d + return trial_balance_row + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties From 443a615f59f47479b1d08e58e9ce4c0c7f0243c0 Mon Sep 17 00:00:00 2001 From: "Joseph T. French" Date: Sun, 22 Mar 2026 20:31:51 -0500 Subject: [PATCH 2/5] Add account type field to AccountResponse, AccountTreeNode, and TrialBalanceRow models ## Summary This commit enhances the `AccountResponse`, `AccountTreeNode`, and `TrialBalanceRow` models by introducing a new `account_type` field, allowing for better categorization and management of account-related data. ## Key Accomplishments - **New Field Addition**: Added `account_type` to `AccountResponse`, `AccountTreeNode`, and `TrialBalanceRow` models, providing additional context for account classification. - **Updated Serialization Logic**: Adjusted serialization and deserialization methods to handle the new `account_type` field appropriately. ## Changes Breakdown - Modified `account_response.py`, `account_tree_node.py`, and `trial_balance_row.py` to include the new field and its associated logic. - Updated `__init__.py` to ensure the new models are included in the public API. ## Testing Notes - Verify that the new `account_type` field is correctly serialized and deserialized in API responses. 
- Ensure backward compatibility with existing data structures that do not include the `account_type` field. ## Infrastructure Considerations - No breaking changes introduced; existing functionality remains intact. --- robosystems_client/api/documents/__init__.py | 1 + .../api/documents/delete_document.py | 180 +++++++ .../api/documents/list_documents.py | 194 ++++++++ .../api/documents/upload_document.py | 190 ++++++++ .../api/documents/upload_documents_bulk.py | 190 ++++++++ robosystems_client/api/search/__init__.py | 1 + .../api/search/get_document_section.py | 182 ++++++++ .../api/search/search_documents.py | 190 ++++++++ robosystems_client/models/__init__.py | 24 + robosystems_client/models/account_response.py | 20 + .../models/account_tree_node.py | 22 +- .../models/bulk_document_upload_request.py | 76 +++ .../models/bulk_document_upload_response.py | 143 ++++++ ...ment_upload_response_errors_type_0_item.py | 46 ++ .../models/document_list_item.py | 151 ++++++ .../models/document_list_response.py | 92 ++++ robosystems_client/models/document_section.py | 439 ++++++++++++++++++ .../models/document_upload_request.py | 143 ++++++ .../models/document_upload_response.py | 86 ++++ robosystems_client/models/search_hit.py | 379 +++++++++++++++ robosystems_client/models/search_request.py | 252 ++++++++++ robosystems_client/models/search_response.py | 100 ++++ .../models/trial_balance_row.py | 24 +- 23 files changed, 3123 insertions(+), 2 deletions(-) create mode 100644 robosystems_client/api/documents/__init__.py create mode 100644 robosystems_client/api/documents/delete_document.py create mode 100644 robosystems_client/api/documents/list_documents.py create mode 100644 robosystems_client/api/documents/upload_document.py create mode 100644 robosystems_client/api/documents/upload_documents_bulk.py create mode 100644 robosystems_client/api/search/__init__.py create mode 100644 robosystems_client/api/search/get_document_section.py create mode 100644 
robosystems_client/api/search/search_documents.py create mode 100644 robosystems_client/models/bulk_document_upload_request.py create mode 100644 robosystems_client/models/bulk_document_upload_response.py create mode 100644 robosystems_client/models/bulk_document_upload_response_errors_type_0_item.py create mode 100644 robosystems_client/models/document_list_item.py create mode 100644 robosystems_client/models/document_list_response.py create mode 100644 robosystems_client/models/document_section.py create mode 100644 robosystems_client/models/document_upload_request.py create mode 100644 robosystems_client/models/document_upload_response.py create mode 100644 robosystems_client/models/search_hit.py create mode 100644 robosystems_client/models/search_request.py create mode 100644 robosystems_client/models/search_response.py diff --git a/robosystems_client/api/documents/__init__.py b/robosystems_client/api/documents/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/robosystems_client/api/documents/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/robosystems_client/api/documents/delete_document.py b/robosystems_client/api/documents/delete_document.py new file mode 100644 index 0000000..84de85c --- /dev/null +++ b/robosystems_client/api/documents/delete_document.py @@ -0,0 +1,180 @@ +from http import HTTPStatus +from typing import Any, cast +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.http_validation_error import HTTPValidationError +from ...types import Response + + +def _get_kwargs( + graph_id: str, + document_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "delete", + "url": "/v1/graphs/{graph_id}/documents/{document_id}".format( + graph_id=quote(str(graph_id), safe=""), + document_id=quote(str(document_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: + if response.status_code == 204: + response_204 = cast(Any, None) + return response_204 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> Response[Any | HTTPValidationError]: + """Delete Document + + Delete a document and all its sections. + + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Any | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + document_id=document_id, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> Any | HTTPValidationError | None: + """Delete Document + + Delete a document and all its sections. + + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + document_id=document_id, + client=client, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> Response[Any | HTTPValidationError]: + """Delete Document + + Delete a document and all its sections. + + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + document_id=document_id, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> Any | HTTPValidationError | None: + """Delete Document + + Delete a document and all its sections. + + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + document_id=document_id, + client=client, + ) + ).parsed diff --git a/robosystems_client/api/documents/list_documents.py b/robosystems_client/api/documents/list_documents.py new file mode 100644 index 0000000..c762a42 --- /dev/null +++ b/robosystems_client/api/documents/list_documents.py @@ -0,0 +1,194 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.document_list_response import DocumentListResponse +from ...models.http_validation_error import HTTPValidationError +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + graph_id: str, + *, + source_type: None | str | Unset = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_source_type: None | str | Unset + if isinstance(source_type, Unset): + json_source_type = UNSET + else: + json_source_type = source_type + params["source_type"] = json_source_type + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/graphs/{graph_id}/documents".format( + graph_id=quote(str(graph_id), safe=""), + ), + "params": params, + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> DocumentListResponse | HTTPValidationError | None: + if response.status_code == 200: + response_200 = DocumentListResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def 
_build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[DocumentListResponse | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + source_type: None | str | Unset = UNSET, +) -> Response[DocumentListResponse | HTTPValidationError]: + """List Documents + + List indexed documents for a graph. + + Args: + graph_id (str): + source_type (None | str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[DocumentListResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + source_type=source_type, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + source_type: None | str | Unset = UNSET, +) -> DocumentListResponse | HTTPValidationError | None: + """List Documents + + List indexed documents for a graph. + + Args: + graph_id (str): + source_type (None | str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + DocumentListResponse | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + source_type=source_type, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + source_type: None | str | Unset = UNSET, +) -> Response[DocumentListResponse | HTTPValidationError]: + """List Documents + + List indexed documents for a graph. + + Args: + graph_id (str): + source_type (None | str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[DocumentListResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + source_type=source_type, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + source_type: None | str | Unset = UNSET, +) -> DocumentListResponse | HTTPValidationError | None: + """List Documents + + List indexed documents for a graph. + + Args: + graph_id (str): + source_type (None | str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + DocumentListResponse | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + source_type=source_type, + ) + ).parsed diff --git a/robosystems_client/api/documents/upload_document.py b/robosystems_client/api/documents/upload_document.py new file mode 100644 index 0000000..54d72d8 --- /dev/null +++ b/robosystems_client/api/documents/upload_document.py @@ -0,0 +1,190 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.document_upload_request import DocumentUploadRequest +from ...models.document_upload_response import DocumentUploadResponse +from ...models.http_validation_error import HTTPValidationError +from ...types import Response + + +def _get_kwargs( + graph_id: str, + *, + body: DocumentUploadRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/v1/graphs/{graph_id}/documents".format( + graph_id=quote(str(graph_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> DocumentUploadResponse | HTTPValidationError | None: + if response.status_code == 200: + response_200 = DocumentUploadResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[DocumentUploadResponse | HTTPValidationError]: + return Response( + 
status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: DocumentUploadRequest, +) -> Response[DocumentUploadResponse | HTTPValidationError]: + """Upload Document + + Upload a markdown document for text indexing. + + Args: + graph_id (str): + body (DocumentUploadRequest): Upload a markdown document for text indexing. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[DocumentUploadResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + body: DocumentUploadRequest, +) -> DocumentUploadResponse | HTTPValidationError | None: + """Upload Document + + Upload a markdown document for text indexing. + + Args: + graph_id (str): + body (DocumentUploadRequest): Upload a markdown document for text indexing. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + DocumentUploadResponse | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: DocumentUploadRequest, +) -> Response[DocumentUploadResponse | HTTPValidationError]: + """Upload Document + + Upload a markdown document for text indexing. 
+ + Args: + graph_id (str): + body (DocumentUploadRequest): Upload a markdown document for text indexing. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[DocumentUploadResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + body: DocumentUploadRequest, +) -> DocumentUploadResponse | HTTPValidationError | None: + """Upload Document + + Upload a markdown document for text indexing. + + Args: + graph_id (str): + body (DocumentUploadRequest): Upload a markdown document for text indexing. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + DocumentUploadResponse | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/api/documents/upload_documents_bulk.py b/robosystems_client/api/documents/upload_documents_bulk.py new file mode 100644 index 0000000..9f2f2e7 --- /dev/null +++ b/robosystems_client/api/documents/upload_documents_bulk.py @@ -0,0 +1,190 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.bulk_document_upload_request import BulkDocumentUploadRequest +from ...models.bulk_document_upload_response import BulkDocumentUploadResponse +from ...models.http_validation_error import HTTPValidationError +from ...types import Response + + +def _get_kwargs( + graph_id: str, + *, + body: BulkDocumentUploadRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/v1/graphs/{graph_id}/documents/bulk".format( + graph_id=quote(str(graph_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> BulkDocumentUploadResponse | HTTPValidationError | None: + if response.status_code == 200: + response_200 = BulkDocumentUploadResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[BulkDocumentUploadResponse | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: BulkDocumentUploadRequest, +) -> Response[BulkDocumentUploadResponse | HTTPValidationError]: + """Upload Documents Bulk + + Upload multiple markdown documents for text indexing (max 50). 
+ + Args: + graph_id (str): + body (BulkDocumentUploadRequest): Bulk upload multiple markdown documents. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[BulkDocumentUploadResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + body: BulkDocumentUploadRequest, +) -> BulkDocumentUploadResponse | HTTPValidationError | None: + """Upload Documents Bulk + + Upload multiple markdown documents for text indexing (max 50). + + Args: + graph_id (str): + body (BulkDocumentUploadRequest): Bulk upload multiple markdown documents. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + BulkDocumentUploadResponse | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: BulkDocumentUploadRequest, +) -> Response[BulkDocumentUploadResponse | HTTPValidationError]: + """Upload Documents Bulk + + Upload multiple markdown documents for text indexing (max 50). + + Args: + graph_id (str): + body (BulkDocumentUploadRequest): Bulk upload multiple markdown documents. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[BulkDocumentUploadResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + body: BulkDocumentUploadRequest, +) -> BulkDocumentUploadResponse | HTTPValidationError | None: + """Upload Documents Bulk + + Upload multiple markdown documents for text indexing (max 50). + + Args: + graph_id (str): + body (BulkDocumentUploadRequest): Bulk upload multiple markdown documents. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + BulkDocumentUploadResponse | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/api/search/__init__.py b/robosystems_client/api/search/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/robosystems_client/api/search/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/robosystems_client/api/search/get_document_section.py b/robosystems_client/api/search/get_document_section.py new file mode 100644 index 0000000..00d5e60 --- /dev/null +++ b/robosystems_client/api/search/get_document_section.py @@ -0,0 +1,182 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.document_section import DocumentSection +from ...models.http_validation_error import HTTPValidationError +from ...types import Response + + +def _get_kwargs( + graph_id: str, + document_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/v1/graphs/{graph_id}/search/{document_id}".format( + graph_id=quote(str(graph_id), safe=""), + document_id=quote(str(document_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> DocumentSection | HTTPValidationError | None: + if response.status_code == 200: + response_200 = DocumentSection.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[DocumentSection | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> Response[DocumentSection | HTTPValidationError]: + """Get Document Section + + Retrieve the full text of a document section by ID. + + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[DocumentSection | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + document_id=document_id, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> DocumentSection | HTTPValidationError | None: + """Get Document Section + + Retrieve the full text of a document section by ID. + + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + DocumentSection | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + document_id=document_id, + client=client, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> Response[DocumentSection | HTTPValidationError]: + """Get Document Section + + Retrieve the full text of a document section by ID. + + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[DocumentSection | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + document_id=document_id, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + document_id: str, + *, + client: AuthenticatedClient, +) -> DocumentSection | HTTPValidationError | None: + """Get Document Section + + Retrieve the full text of a document section by ID. 
+ + Args: + graph_id (str): + document_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + DocumentSection | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + document_id=document_id, + client=client, + ) + ).parsed diff --git a/robosystems_client/api/search/search_documents.py b/robosystems_client/api/search/search_documents.py new file mode 100644 index 0000000..57d0ccd --- /dev/null +++ b/robosystems_client/api/search/search_documents.py @@ -0,0 +1,190 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.http_validation_error import HTTPValidationError +from ...models.search_request import SearchRequest +from ...models.search_response import SearchResponse +from ...types import Response + + +def _get_kwargs( + graph_id: str, + *, + body: SearchRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/v1/graphs/{graph_id}/search".format( + graph_id=quote(str(graph_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | SearchResponse | None: + if response.status_code == 200: + response_200 = SearchResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + 
+def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | SearchResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: SearchRequest, +) -> Response[HTTPValidationError | SearchResponse]: + """Search Documents + + Search filing narratives and text content within a graph. + + Args: + graph_id (str): + body (SearchRequest): Request model for document search. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HTTPValidationError | SearchResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + body: SearchRequest, +) -> HTTPValidationError | SearchResponse | None: + """Search Documents + + Search filing narratives and text content within a graph. + + Args: + graph_id (str): + body (SearchRequest): Request model for document search. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + HTTPValidationError | SearchResponse + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: SearchRequest, +) -> Response[HTTPValidationError | SearchResponse]: + """Search Documents + + Search filing narratives and text content within a graph. + + Args: + graph_id (str): + body (SearchRequest): Request model for document search. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HTTPValidationError | SearchResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + body: SearchRequest, +) -> HTTPValidationError | SearchResponse | None: + """Search Documents + + Search filing narratives and text content within a graph. + + Args: + graph_id (str): + body (SearchRequest): Request model for document search. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + HTTPValidationError | SearchResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/models/__init__.py b/robosystems_client/models/__init__.py index e55770e..d13faf5 100644 --- a/robosystems_client/models/__init__.py +++ b/robosystems_client/models/__init__.py @@ -44,6 +44,11 @@ from .batch_agent_request import BatchAgentRequest from .batch_agent_response import BatchAgentResponse from .billing_customer import BillingCustomer +from .bulk_document_upload_request import BulkDocumentUploadRequest +from .bulk_document_upload_response import BulkDocumentUploadResponse +from .bulk_document_upload_response_errors_type_0_item import ( + BulkDocumentUploadResponseErrorsType0Item, +) from .cancel_operation_response_canceloperation import ( CancelOperationResponseCanceloperation, ) @@ -93,6 +98,11 @@ DetailedTransactionsResponseDateRange, ) from .detailed_transactions_response_summary import DetailedTransactionsResponseSummary +from .document_list_item import DocumentListItem +from .document_list_response import DocumentListResponse +from .document_section import DocumentSection +from .document_upload_request import DocumentUploadRequest +from .document_upload_response import DocumentUploadResponse from .download_quota import DownloadQuota from .email_verification_request import EmailVerificationRequest from .enhanced_credit_transaction_response import EnhancedCreditTransactionResponse @@ -241,6 +251,9 @@ SchemaValidationResponseCompatibilityType0, ) from .schema_validation_response_stats_type_0 import SchemaValidationResponseStatsType0 +from .search_hit import SearchHit +from .search_request import SearchRequest +from .search_response import SearchResponse from .sec_connection_config import SECConnectionConfig from .selection_criteria import SelectionCriteria from .service_offering_summary import ServiceOfferingSummary @@ -333,6 +346,9 @@ "BatchAgentRequest", 
"BatchAgentResponse", "BillingCustomer", + "BulkDocumentUploadRequest", + "BulkDocumentUploadResponse", + "BulkDocumentUploadResponseErrorsType0Item", "CancelOperationResponseCanceloperation", "CheckCreditBalanceResponseCheckcreditbalance", "CheckoutResponse", @@ -374,6 +390,11 @@ "DetailedTransactionsResponse", "DetailedTransactionsResponseDateRange", "DetailedTransactionsResponseSummary", + "DocumentListItem", + "DocumentListResponse", + "DocumentSection", + "DocumentUploadRequest", + "DocumentUploadResponse", "DownloadQuota", "EmailVerificationRequest", "EnhancedCreditTransactionResponse", @@ -496,6 +517,9 @@ "SchemaValidationResponse", "SchemaValidationResponseCompatibilityType0", "SchemaValidationResponseStatsType0", + "SearchHit", + "SearchRequest", + "SearchResponse", "SECConnectionConfig", "SelectionCriteria", "ServiceOfferingsResponse", diff --git a/robosystems_client/models/account_response.py b/robosystems_client/models/account_response.py index accc8e2..7d3d333 100644 --- a/robosystems_client/models/account_response.py +++ b/robosystems_client/models/account_response.py @@ -27,6 +27,7 @@ class AccountResponse: description (None | str | Unset): sub_classification (None | str | Unset): parent_id (None | str | Unset): + account_type (None | str | Unset): external_id (None | str | Unset): external_source (None | str | Unset): """ @@ -43,6 +44,7 @@ class AccountResponse: description: None | str | Unset = UNSET sub_classification: None | str | Unset = UNSET parent_id: None | str | Unset = UNSET + account_type: None | str | Unset = UNSET external_id: None | str | Unset = UNSET external_source: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -84,6 +86,12 @@ def to_dict(self) -> dict[str, Any]: else: parent_id = self.parent_id + account_type: None | str | Unset + if isinstance(self.account_type, Unset): + account_type = UNSET + else: + account_type = self.account_type + external_id: None | str | Unset 
if isinstance(self.external_id, Unset): external_id = UNSET @@ -117,6 +125,8 @@ def to_dict(self) -> dict[str, Any]: field_dict["sub_classification"] = sub_classification if parent_id is not UNSET: field_dict["parent_id"] = parent_id + if account_type is not UNSET: + field_dict["account_type"] = account_type if external_id is not UNSET: field_dict["external_id"] = external_id if external_source is not UNSET: @@ -172,6 +182,15 @@ def _parse_parent_id(data: object) -> None | str | Unset: parent_id = _parse_parent_id(d.pop("parent_id", UNSET)) + def _parse_account_type(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + account_type = _parse_account_type(d.pop("account_type", UNSET)) + def _parse_external_id(data: object) -> None | str | Unset: if data is None: return data @@ -203,6 +222,7 @@ def _parse_external_source(data: object) -> None | str | Unset: description=description, sub_classification=sub_classification, parent_id=parent_id, + account_type=account_type, external_id=external_id, external_source=external_source, ) diff --git a/robosystems_client/models/account_tree_node.py b/robosystems_client/models/account_tree_node.py index 7a2062e..4982b61 100644 --- a/robosystems_client/models/account_tree_node.py +++ b/robosystems_client/models/account_tree_node.py @@ -1,7 +1,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, TypeVar +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,6 +22,7 @@ class AccountTreeNode: balance_type (str): depth (int): is_active (bool): + account_type (None | str | Unset): children (list[AccountTreeNode] | Unset): """ @@ -32,6 +33,7 @@ class AccountTreeNode: balance_type: str depth: int is_active: bool + account_type: None | str | Unset = UNSET children: list[AccountTreeNode] | Unset = UNSET 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -50,6 +52,12 @@ def to_dict(self) -> dict[str, Any]: is_active = self.is_active + account_type: None | str | Unset + if isinstance(self.account_type, Unset): + account_type = UNSET + else: + account_type = self.account_type + children: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.children, Unset): children = [] @@ -70,6 +78,8 @@ def to_dict(self) -> dict[str, Any]: "is_active": is_active, } ) + if account_type is not UNSET: + field_dict["account_type"] = account_type if children is not UNSET: field_dict["children"] = children @@ -92,6 +102,15 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: is_active = d.pop("is_active") + def _parse_account_type(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + account_type = _parse_account_type(d.pop("account_type", UNSET)) + _children = d.pop("children", UNSET) children: list[AccountTreeNode] | Unset = UNSET if _children is not UNSET: @@ -109,6 +128,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: balance_type=balance_type, depth=depth, is_active=is_active, + account_type=account_type, children=children, ) diff --git a/robosystems_client/models/bulk_document_upload_request.py b/robosystems_client/models/bulk_document_upload_request.py new file mode 100644 index 0000000..9c4d294 --- /dev/null +++ b/robosystems_client/models/bulk_document_upload_request.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.document_upload_request import DocumentUploadRequest + + +T = TypeVar("T", bound="BulkDocumentUploadRequest") + + +@_attrs_define +class BulkDocumentUploadRequest: + """Bulk upload 
multiple markdown documents. + + Attributes: + documents (list[DocumentUploadRequest]): Documents to upload (max 50) + """ + + documents: list[DocumentUploadRequest] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + documents = [] + for documents_item_data in self.documents: + documents_item = documents_item_data.to_dict() + documents.append(documents_item) + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "documents": documents, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.document_upload_request import DocumentUploadRequest + + d = dict(src_dict) + documents = [] + _documents = d.pop("documents") + for documents_item_data in _documents: + documents_item = DocumentUploadRequest.from_dict(documents_item_data) + + documents.append(documents_item) + + bulk_document_upload_request = cls( + documents=documents, + ) + + bulk_document_upload_request.additional_properties = d + return bulk_document_upload_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/bulk_document_upload_response.py b/robosystems_client/models/bulk_document_upload_response.py new file mode 100644 index 0000000..f87a95c --- /dev/null +++ b/robosystems_client/models/bulk_document_upload_response.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, 
cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.bulk_document_upload_response_errors_type_0_item import ( + BulkDocumentUploadResponseErrorsType0Item, + ) + from ..models.document_upload_response import DocumentUploadResponse + + +T = TypeVar("T", bound="BulkDocumentUploadResponse") + + +@_attrs_define +class BulkDocumentUploadResponse: + """Response from bulk document upload. + + Attributes: + total_documents (int): + total_sections_indexed (int): + results (list[DocumentUploadResponse]): + errors (list[BulkDocumentUploadResponseErrorsType0Item] | None | Unset): + """ + + total_documents: int + total_sections_indexed: int + results: list[DocumentUploadResponse] + errors: list[BulkDocumentUploadResponseErrorsType0Item] | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + total_documents = self.total_documents + + total_sections_indexed = self.total_sections_indexed + + results = [] + for results_item_data in self.results: + results_item = results_item_data.to_dict() + results.append(results_item) + + errors: list[dict[str, Any]] | None | Unset + if isinstance(self.errors, Unset): + errors = UNSET + elif isinstance(self.errors, list): + errors = [] + for errors_type_0_item_data in self.errors: + errors_type_0_item = errors_type_0_item_data.to_dict() + errors.append(errors_type_0_item) + + else: + errors = self.errors + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "total_documents": total_documents, + "total_sections_indexed": total_sections_indexed, + "results": results, + } + ) + if errors is not UNSET: + field_dict["errors"] = errors + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from 
..models.bulk_document_upload_response_errors_type_0_item import ( + BulkDocumentUploadResponseErrorsType0Item, + ) + from ..models.document_upload_response import DocumentUploadResponse + + d = dict(src_dict) + total_documents = d.pop("total_documents") + + total_sections_indexed = d.pop("total_sections_indexed") + + results = [] + _results = d.pop("results") + for results_item_data in _results: + results_item = DocumentUploadResponse.from_dict(results_item_data) + + results.append(results_item) + + def _parse_errors( + data: object, + ) -> list[BulkDocumentUploadResponseErrorsType0Item] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + errors_type_0 = [] + _errors_type_0 = data + for errors_type_0_item_data in _errors_type_0: + errors_type_0_item = BulkDocumentUploadResponseErrorsType0Item.from_dict( + errors_type_0_item_data + ) + + errors_type_0.append(errors_type_0_item) + + return errors_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[BulkDocumentUploadResponseErrorsType0Item] | None | Unset, data) + + errors = _parse_errors(d.pop("errors", UNSET)) + + bulk_document_upload_response = cls( + total_documents=total_documents, + total_sections_indexed=total_sections_indexed, + results=results, + errors=errors, + ) + + bulk_document_upload_response.additional_properties = d + return bulk_document_upload_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git 
a/robosystems_client/models/bulk_document_upload_response_errors_type_0_item.py b/robosystems_client/models/bulk_document_upload_response_errors_type_0_item.py new file mode 100644 index 0000000..97d3d7c --- /dev/null +++ b/robosystems_client/models/bulk_document_upload_response_errors_type_0_item.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="BulkDocumentUploadResponseErrorsType0Item") + + +@_attrs_define +class BulkDocumentUploadResponseErrorsType0Item: + """ """ + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + bulk_document_upload_response_errors_type_0_item = cls() + + bulk_document_upload_response_errors_type_0_item.additional_properties = d + return bulk_document_upload_response_errors_type_0_item + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/document_list_item.py b/robosystems_client/models/document_list_item.py new file mode 100644 index 0000000..60409d5 --- /dev/null +++ b/robosystems_client/models/document_list_item.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, 
TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="DocumentListItem") + + +@_attrs_define +class DocumentListItem: + """A document in the document list. + + Attributes: + document_title (str): + section_count (int): + source_type (str): + folder (None | str | Unset): + tags (list[str] | None | Unset): + last_indexed (None | str | Unset): + """ + + document_title: str + section_count: int + source_type: str + folder: None | str | Unset = UNSET + tags: list[str] | None | Unset = UNSET + last_indexed: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + document_title = self.document_title + + section_count = self.section_count + + source_type = self.source_type + + folder: None | str | Unset + if isinstance(self.folder, Unset): + folder = UNSET + else: + folder = self.folder + + tags: list[str] | None | Unset + if isinstance(self.tags, Unset): + tags = UNSET + elif isinstance(self.tags, list): + tags = self.tags + + else: + tags = self.tags + + last_indexed: None | str | Unset + if isinstance(self.last_indexed, Unset): + last_indexed = UNSET + else: + last_indexed = self.last_indexed + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "document_title": document_title, + "section_count": section_count, + "source_type": source_type, + } + ) + if folder is not UNSET: + field_dict["folder"] = folder + if tags is not UNSET: + field_dict["tags"] = tags + if last_indexed is not UNSET: + field_dict["last_indexed"] = last_indexed + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + document_title = d.pop("document_title") + + section_count = d.pop("section_count") + + source_type = d.pop("source_type") + + def _parse_folder(data: object) 
-> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + folder = _parse_folder(d.pop("folder", UNSET)) + + def _parse_tags(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + tags_type_0 = cast(list[str], data) + + return tags_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + tags = _parse_tags(d.pop("tags", UNSET)) + + def _parse_last_indexed(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + last_indexed = _parse_last_indexed(d.pop("last_indexed", UNSET)) + + document_list_item = cls( + document_title=document_title, + section_count=section_count, + source_type=source_type, + folder=folder, + tags=tags, + last_indexed=last_indexed, + ) + + document_list_item.additional_properties = d + return document_list_item + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/document_list_response.py b/robosystems_client/models/document_list_response.py new file mode 100644 index 0000000..bc4a828 --- /dev/null +++ b/robosystems_client/models/document_list_response.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as 
_attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.document_list_item import DocumentListItem + + +T = TypeVar("T", bound="DocumentListResponse") + + +@_attrs_define +class DocumentListResponse: + """Response from listing indexed documents. + + Attributes: + total (int): + documents (list[DocumentListItem]): + graph_id (str): + """ + + total: int + documents: list[DocumentListItem] + graph_id: str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + total = self.total + + documents = [] + for documents_item_data in self.documents: + documents_item = documents_item_data.to_dict() + documents.append(documents_item) + + graph_id = self.graph_id + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "total": total, + "documents": documents, + "graph_id": graph_id, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.document_list_item import DocumentListItem + + d = dict(src_dict) + total = d.pop("total") + + documents = [] + _documents = d.pop("documents") + for documents_item_data in _documents: + documents_item = DocumentListItem.from_dict(documents_item_data) + + documents.append(documents_item) + + graph_id = d.pop("graph_id") + + document_list_response = cls( + total=total, + documents=documents, + graph_id=graph_id, + ) + + document_list_response.additional_properties = d + return document_list_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return 
key in self.additional_properties diff --git a/robosystems_client/models/document_section.py b/robosystems_client/models/document_section.py new file mode 100644 index 0000000..7e933d7 --- /dev/null +++ b/robosystems_client/models/document_section.py @@ -0,0 +1,439 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="DocumentSection") + + +@_attrs_define +class DocumentSection: + """Full document section retrieved by ID. + + Attributes: + document_id (str): + graph_id (str): + source_type (str): + content (str): + entity_ticker (None | str | Unset): + entity_name (None | str | Unset): + entity_cik (None | str | Unset): + section_label (None | str | Unset): + section_id (None | str | Unset): + element_qname (None | str | Unset): + filing_date (None | str | Unset): + fiscal_year (int | None | Unset): + fiscal_period (None | str | Unset): + form_type (None | str | Unset): + accession_number (None | str | Unset): + xbrl_elements (list[str] | None | Unset): + content_url (None | str | Unset): + content_length (int | Unset): Default: 0. 
+ document_title (None | str | Unset): + tags (list[str] | None | Unset): + folder (None | str | Unset): + """ + + document_id: str + graph_id: str + source_type: str + content: str + entity_ticker: None | str | Unset = UNSET + entity_name: None | str | Unset = UNSET + entity_cik: None | str | Unset = UNSET + section_label: None | str | Unset = UNSET + section_id: None | str | Unset = UNSET + element_qname: None | str | Unset = UNSET + filing_date: None | str | Unset = UNSET + fiscal_year: int | None | Unset = UNSET + fiscal_period: None | str | Unset = UNSET + form_type: None | str | Unset = UNSET + accession_number: None | str | Unset = UNSET + xbrl_elements: list[str] | None | Unset = UNSET + content_url: None | str | Unset = UNSET + content_length: int | Unset = 0 + document_title: None | str | Unset = UNSET + tags: list[str] | None | Unset = UNSET + folder: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + document_id = self.document_id + + graph_id = self.graph_id + + source_type = self.source_type + + content = self.content + + entity_ticker: None | str | Unset + if isinstance(self.entity_ticker, Unset): + entity_ticker = UNSET + else: + entity_ticker = self.entity_ticker + + entity_name: None | str | Unset + if isinstance(self.entity_name, Unset): + entity_name = UNSET + else: + entity_name = self.entity_name + + entity_cik: None | str | Unset + if isinstance(self.entity_cik, Unset): + entity_cik = UNSET + else: + entity_cik = self.entity_cik + + section_label: None | str | Unset + if isinstance(self.section_label, Unset): + section_label = UNSET + else: + section_label = self.section_label + + section_id: None | str | Unset + if isinstance(self.section_id, Unset): + section_id = UNSET + else: + section_id = self.section_id + + element_qname: None | str | Unset + if isinstance(self.element_qname, Unset): + element_qname = UNSET + else: + element_qname = 
self.element_qname + + filing_date: None | str | Unset + if isinstance(self.filing_date, Unset): + filing_date = UNSET + else: + filing_date = self.filing_date + + fiscal_year: int | None | Unset + if isinstance(self.fiscal_year, Unset): + fiscal_year = UNSET + else: + fiscal_year = self.fiscal_year + + fiscal_period: None | str | Unset + if isinstance(self.fiscal_period, Unset): + fiscal_period = UNSET + else: + fiscal_period = self.fiscal_period + + form_type: None | str | Unset + if isinstance(self.form_type, Unset): + form_type = UNSET + else: + form_type = self.form_type + + accession_number: None | str | Unset + if isinstance(self.accession_number, Unset): + accession_number = UNSET + else: + accession_number = self.accession_number + + xbrl_elements: list[str] | None | Unset + if isinstance(self.xbrl_elements, Unset): + xbrl_elements = UNSET + elif isinstance(self.xbrl_elements, list): + xbrl_elements = self.xbrl_elements + + else: + xbrl_elements = self.xbrl_elements + + content_url: None | str | Unset + if isinstance(self.content_url, Unset): + content_url = UNSET + else: + content_url = self.content_url + + content_length = self.content_length + + document_title: None | str | Unset + if isinstance(self.document_title, Unset): + document_title = UNSET + else: + document_title = self.document_title + + tags: list[str] | None | Unset + if isinstance(self.tags, Unset): + tags = UNSET + elif isinstance(self.tags, list): + tags = self.tags + + else: + tags = self.tags + + folder: None | str | Unset + if isinstance(self.folder, Unset): + folder = UNSET + else: + folder = self.folder + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "document_id": document_id, + "graph_id": graph_id, + "source_type": source_type, + "content": content, + } + ) + if entity_ticker is not UNSET: + field_dict["entity_ticker"] = entity_ticker + if entity_name is not UNSET: + field_dict["entity_name"] = entity_name + if 
entity_cik is not UNSET: + field_dict["entity_cik"] = entity_cik + if section_label is not UNSET: + field_dict["section_label"] = section_label + if section_id is not UNSET: + field_dict["section_id"] = section_id + if element_qname is not UNSET: + field_dict["element_qname"] = element_qname + if filing_date is not UNSET: + field_dict["filing_date"] = filing_date + if fiscal_year is not UNSET: + field_dict["fiscal_year"] = fiscal_year + if fiscal_period is not UNSET: + field_dict["fiscal_period"] = fiscal_period + if form_type is not UNSET: + field_dict["form_type"] = form_type + if accession_number is not UNSET: + field_dict["accession_number"] = accession_number + if xbrl_elements is not UNSET: + field_dict["xbrl_elements"] = xbrl_elements + if content_url is not UNSET: + field_dict["content_url"] = content_url + if content_length is not UNSET: + field_dict["content_length"] = content_length + if document_title is not UNSET: + field_dict["document_title"] = document_title + if tags is not UNSET: + field_dict["tags"] = tags + if folder is not UNSET: + field_dict["folder"] = folder + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + document_id = d.pop("document_id") + + graph_id = d.pop("graph_id") + + source_type = d.pop("source_type") + + content = d.pop("content") + + def _parse_entity_ticker(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + entity_ticker = _parse_entity_ticker(d.pop("entity_ticker", UNSET)) + + def _parse_entity_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + entity_name = _parse_entity_name(d.pop("entity_name", UNSET)) + + def _parse_entity_cik(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + 
return data + return cast(None | str | Unset, data) + + entity_cik = _parse_entity_cik(d.pop("entity_cik", UNSET)) + + def _parse_section_label(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + section_label = _parse_section_label(d.pop("section_label", UNSET)) + + def _parse_section_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + section_id = _parse_section_id(d.pop("section_id", UNSET)) + + def _parse_element_qname(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + element_qname = _parse_element_qname(d.pop("element_qname", UNSET)) + + def _parse_filing_date(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + filing_date = _parse_filing_date(d.pop("filing_date", UNSET)) + + def _parse_fiscal_year(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + fiscal_year = _parse_fiscal_year(d.pop("fiscal_year", UNSET)) + + def _parse_fiscal_period(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + fiscal_period = _parse_fiscal_period(d.pop("fiscal_period", UNSET)) + + def _parse_form_type(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + form_type = _parse_form_type(d.pop("form_type", UNSET)) + + def _parse_accession_number(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return 
data + return cast(None | str | Unset, data) + + accession_number = _parse_accession_number(d.pop("accession_number", UNSET)) + + def _parse_xbrl_elements(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + xbrl_elements_type_0 = cast(list[str], data) + + return xbrl_elements_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + xbrl_elements = _parse_xbrl_elements(d.pop("xbrl_elements", UNSET)) + + def _parse_content_url(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + content_url = _parse_content_url(d.pop("content_url", UNSET)) + + content_length = d.pop("content_length", UNSET) + + def _parse_document_title(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + document_title = _parse_document_title(d.pop("document_title", UNSET)) + + def _parse_tags(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + tags_type_0 = cast(list[str], data) + + return tags_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + tags = _parse_tags(d.pop("tags", UNSET)) + + def _parse_folder(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + folder = _parse_folder(d.pop("folder", UNSET)) + + document_section = cls( + document_id=document_id, + graph_id=graph_id, + source_type=source_type, + content=content, + entity_ticker=entity_ticker, + entity_name=entity_name, + 
entity_cik=entity_cik, + section_label=section_label, + section_id=section_id, + element_qname=element_qname, + filing_date=filing_date, + fiscal_year=fiscal_year, + fiscal_period=fiscal_period, + form_type=form_type, + accession_number=accession_number, + xbrl_elements=xbrl_elements, + content_url=content_url, + content_length=content_length, + document_title=document_title, + tags=tags, + folder=folder, + ) + + document_section.additional_properties = d + return document_section + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/document_upload_request.py b/robosystems_client/models/document_upload_request.py new file mode 100644 index 0000000..265e500 --- /dev/null +++ b/robosystems_client/models/document_upload_request.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="DocumentUploadRequest") + + +@_attrs_define +class DocumentUploadRequest: + """Upload a markdown document for text indexing. 
+ + Attributes: + title (str): Document title + content (str): Markdown content + tags (list[str] | None | Unset): Optional tags for filtering + folder (None | str | Unset): Optional folder/category + external_id (None | str | Unset): Optional external identifier for upsert (e.g., Google Drive file ID) + """ + + title: str + content: str + tags: list[str] | None | Unset = UNSET + folder: None | str | Unset = UNSET + external_id: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + title = self.title + + content = self.content + + tags: list[str] | None | Unset + if isinstance(self.tags, Unset): + tags = UNSET + elif isinstance(self.tags, list): + tags = self.tags + + else: + tags = self.tags + + folder: None | str | Unset + if isinstance(self.folder, Unset): + folder = UNSET + else: + folder = self.folder + + external_id: None | str | Unset + if isinstance(self.external_id, Unset): + external_id = UNSET + else: + external_id = self.external_id + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "title": title, + "content": content, + } + ) + if tags is not UNSET: + field_dict["tags"] = tags + if folder is not UNSET: + field_dict["folder"] = folder + if external_id is not UNSET: + field_dict["external_id"] = external_id + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + title = d.pop("title") + + content = d.pop("content") + + def _parse_tags(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + tags_type_0 = cast(list[str], data) + + return tags_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + tags = _parse_tags(d.pop("tags", UNSET)) + + 
def _parse_folder(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + folder = _parse_folder(d.pop("folder", UNSET)) + + def _parse_external_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + external_id = _parse_external_id(d.pop("external_id", UNSET)) + + document_upload_request = cls( + title=title, + content=content, + tags=tags, + folder=folder, + external_id=external_id, + ) + + document_upload_request.additional_properties = d + return document_upload_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/document_upload_response.py b/robosystems_client/models/document_upload_response.py new file mode 100644 index 0000000..f22d1ce --- /dev/null +++ b/robosystems_client/models/document_upload_response.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="DocumentUploadResponse") + + +@_attrs_define +class DocumentUploadResponse: + """Response from document upload. 
+ + Attributes: + document_id (str): + sections_indexed (int): + total_content_length (int): + section_ids (list[str]): + """ + + document_id: str + sections_indexed: int + total_content_length: int + section_ids: list[str] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + document_id = self.document_id + + sections_indexed = self.sections_indexed + + total_content_length = self.total_content_length + + section_ids = self.section_ids + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "document_id": document_id, + "sections_indexed": sections_indexed, + "total_content_length": total_content_length, + "section_ids": section_ids, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + document_id = d.pop("document_id") + + sections_indexed = d.pop("sections_indexed") + + total_content_length = d.pop("total_content_length") + + section_ids = cast(list[str], d.pop("section_ids")) + + document_upload_response = cls( + document_id=document_id, + sections_indexed=sections_indexed, + total_content_length=total_content_length, + section_ids=section_ids, + ) + + document_upload_response.additional_properties = d + return document_upload_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/search_hit.py b/robosystems_client/models/search_hit.py new file mode 100644 index 0000000..f642234 --- /dev/null +++ 
b/robosystems_client/models/search_hit.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="SearchHit") + + +@_attrs_define +class SearchHit: + """A single search result with snippet. + + Attributes: + document_id (str): + score (float): + source_type (str): + snippet (str): + entity_ticker (None | str | Unset): + entity_name (None | str | Unset): + section_label (None | str | Unset): + section_id (None | str | Unset): + element_qname (None | str | Unset): + filing_date (None | str | Unset): + fiscal_year (int | None | Unset): + form_type (None | str | Unset): + xbrl_elements (list[str] | None | Unset): + content_length (int | Unset): Default: 0. + content_url (None | str | Unset): + document_title (None | str | Unset): + tags (list[str] | None | Unset): + folder (None | str | Unset): + """ + + document_id: str + score: float + source_type: str + snippet: str + entity_ticker: None | str | Unset = UNSET + entity_name: None | str | Unset = UNSET + section_label: None | str | Unset = UNSET + section_id: None | str | Unset = UNSET + element_qname: None | str | Unset = UNSET + filing_date: None | str | Unset = UNSET + fiscal_year: int | None | Unset = UNSET + form_type: None | str | Unset = UNSET + xbrl_elements: list[str] | None | Unset = UNSET + content_length: int | Unset = 0 + content_url: None | str | Unset = UNSET + document_title: None | str | Unset = UNSET + tags: list[str] | None | Unset = UNSET + folder: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + document_id = self.document_id + + score = self.score + + source_type = self.source_type + + snippet = self.snippet + + entity_ticker: None | str | Unset + if 
isinstance(self.entity_ticker, Unset): + entity_ticker = UNSET + else: + entity_ticker = self.entity_ticker + + entity_name: None | str | Unset + if isinstance(self.entity_name, Unset): + entity_name = UNSET + else: + entity_name = self.entity_name + + section_label: None | str | Unset + if isinstance(self.section_label, Unset): + section_label = UNSET + else: + section_label = self.section_label + + section_id: None | str | Unset + if isinstance(self.section_id, Unset): + section_id = UNSET + else: + section_id = self.section_id + + element_qname: None | str | Unset + if isinstance(self.element_qname, Unset): + element_qname = UNSET + else: + element_qname = self.element_qname + + filing_date: None | str | Unset + if isinstance(self.filing_date, Unset): + filing_date = UNSET + else: + filing_date = self.filing_date + + fiscal_year: int | None | Unset + if isinstance(self.fiscal_year, Unset): + fiscal_year = UNSET + else: + fiscal_year = self.fiscal_year + + form_type: None | str | Unset + if isinstance(self.form_type, Unset): + form_type = UNSET + else: + form_type = self.form_type + + xbrl_elements: list[str] | None | Unset + if isinstance(self.xbrl_elements, Unset): + xbrl_elements = UNSET + elif isinstance(self.xbrl_elements, list): + xbrl_elements = self.xbrl_elements + + else: + xbrl_elements = self.xbrl_elements + + content_length = self.content_length + + content_url: None | str | Unset + if isinstance(self.content_url, Unset): + content_url = UNSET + else: + content_url = self.content_url + + document_title: None | str | Unset + if isinstance(self.document_title, Unset): + document_title = UNSET + else: + document_title = self.document_title + + tags: list[str] | None | Unset + if isinstance(self.tags, Unset): + tags = UNSET + elif isinstance(self.tags, list): + tags = self.tags + + else: + tags = self.tags + + folder: None | str | Unset + if isinstance(self.folder, Unset): + folder = UNSET + else: + folder = self.folder + + field_dict: dict[str, Any] = {} 
+ field_dict.update(self.additional_properties) + field_dict.update( + { + "document_id": document_id, + "score": score, + "source_type": source_type, + "snippet": snippet, + } + ) + if entity_ticker is not UNSET: + field_dict["entity_ticker"] = entity_ticker + if entity_name is not UNSET: + field_dict["entity_name"] = entity_name + if section_label is not UNSET: + field_dict["section_label"] = section_label + if section_id is not UNSET: + field_dict["section_id"] = section_id + if element_qname is not UNSET: + field_dict["element_qname"] = element_qname + if filing_date is not UNSET: + field_dict["filing_date"] = filing_date + if fiscal_year is not UNSET: + field_dict["fiscal_year"] = fiscal_year + if form_type is not UNSET: + field_dict["form_type"] = form_type + if xbrl_elements is not UNSET: + field_dict["xbrl_elements"] = xbrl_elements + if content_length is not UNSET: + field_dict["content_length"] = content_length + if content_url is not UNSET: + field_dict["content_url"] = content_url + if document_title is not UNSET: + field_dict["document_title"] = document_title + if tags is not UNSET: + field_dict["tags"] = tags + if folder is not UNSET: + field_dict["folder"] = folder + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + document_id = d.pop("document_id") + + score = d.pop("score") + + source_type = d.pop("source_type") + + snippet = d.pop("snippet") + + def _parse_entity_ticker(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + entity_ticker = _parse_entity_ticker(d.pop("entity_ticker", UNSET)) + + def _parse_entity_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + entity_name = _parse_entity_name(d.pop("entity_name", UNSET)) + + def _parse_section_label(data: 
object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + section_label = _parse_section_label(d.pop("section_label", UNSET)) + + def _parse_section_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + section_id = _parse_section_id(d.pop("section_id", UNSET)) + + def _parse_element_qname(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + element_qname = _parse_element_qname(d.pop("element_qname", UNSET)) + + def _parse_filing_date(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + filing_date = _parse_filing_date(d.pop("filing_date", UNSET)) + + def _parse_fiscal_year(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + fiscal_year = _parse_fiscal_year(d.pop("fiscal_year", UNSET)) + + def _parse_form_type(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + form_type = _parse_form_type(d.pop("form_type", UNSET)) + + def _parse_xbrl_elements(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + xbrl_elements_type_0 = cast(list[str], data) + + return xbrl_elements_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + xbrl_elements = _parse_xbrl_elements(d.pop("xbrl_elements", UNSET)) + + content_length = d.pop("content_length", UNSET) + + def 
_parse_content_url(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + content_url = _parse_content_url(d.pop("content_url", UNSET)) + + def _parse_document_title(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + document_title = _parse_document_title(d.pop("document_title", UNSET)) + + def _parse_tags(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + tags_type_0 = cast(list[str], data) + + return tags_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + tags = _parse_tags(d.pop("tags", UNSET)) + + def _parse_folder(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + folder = _parse_folder(d.pop("folder", UNSET)) + + search_hit = cls( + document_id=document_id, + score=score, + source_type=source_type, + snippet=snippet, + entity_ticker=entity_ticker, + entity_name=entity_name, + section_label=section_label, + section_id=section_id, + element_qname=element_qname, + filing_date=filing_date, + fiscal_year=fiscal_year, + form_type=form_type, + xbrl_elements=xbrl_elements, + content_length=content_length, + content_url=content_url, + document_title=document_title, + tags=tags, + folder=folder, + ) + + search_hit.additional_properties = d + return search_hit + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def 
__delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/search_request.py b/robosystems_client/models/search_request.py new file mode 100644 index 0000000..508beca --- /dev/null +++ b/robosystems_client/models/search_request.py @@ -0,0 +1,252 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="SearchRequest") + + +@_attrs_define +class SearchRequest: + """Request model for document search. + + Attributes: + query (str): Search query + entity (None | str | Unset): Filter by ticker, CIK, or entity name + form_type (None | str | Unset): Filter by SEC form type (10-K, 10-Q) + section (None | str | Unset): Filter by section ID (item_1, item_1a, item_7, etc.) + element (None | str | Unset): Filter by XBRL element qname (e.g., us-gaap:Goodwill) + source_type (None | str | Unset): Filter by source type (xbrl_textblock, narrative_section, ixbrl_disclosure, + uploaded_doc, memory) + fiscal_year (int | None | Unset): Filter by fiscal year + date_from (None | str | Unset): Filter filings on or after date (YYYY-MM-DD) + date_to (None | str | Unset): Filter filings on or before date (YYYY-MM-DD) + semantic (bool | Unset): Enable semantic (vector) search if available Default: False. + size (int | Unset): Max results to return Default: 10. + offset (int | Unset): Pagination offset Default: 0. 
+ """ + + query: str + entity: None | str | Unset = UNSET + form_type: None | str | Unset = UNSET + section: None | str | Unset = UNSET + element: None | str | Unset = UNSET + source_type: None | str | Unset = UNSET + fiscal_year: int | None | Unset = UNSET + date_from: None | str | Unset = UNSET + date_to: None | str | Unset = UNSET + semantic: bool | Unset = False + size: int | Unset = 10 + offset: int | Unset = 0 + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + query = self.query + + entity: None | str | Unset + if isinstance(self.entity, Unset): + entity = UNSET + else: + entity = self.entity + + form_type: None | str | Unset + if isinstance(self.form_type, Unset): + form_type = UNSET + else: + form_type = self.form_type + + section: None | str | Unset + if isinstance(self.section, Unset): + section = UNSET + else: + section = self.section + + element: None | str | Unset + if isinstance(self.element, Unset): + element = UNSET + else: + element = self.element + + source_type: None | str | Unset + if isinstance(self.source_type, Unset): + source_type = UNSET + else: + source_type = self.source_type + + fiscal_year: int | None | Unset + if isinstance(self.fiscal_year, Unset): + fiscal_year = UNSET + else: + fiscal_year = self.fiscal_year + + date_from: None | str | Unset + if isinstance(self.date_from, Unset): + date_from = UNSET + else: + date_from = self.date_from + + date_to: None | str | Unset + if isinstance(self.date_to, Unset): + date_to = UNSET + else: + date_to = self.date_to + + semantic = self.semantic + + size = self.size + + offset = self.offset + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "query": query, + } + ) + if entity is not UNSET: + field_dict["entity"] = entity + if form_type is not UNSET: + field_dict["form_type"] = form_type + if section is not UNSET: + field_dict["section"] = section + if element is 
not UNSET: + field_dict["element"] = element + if source_type is not UNSET: + field_dict["source_type"] = source_type + if fiscal_year is not UNSET: + field_dict["fiscal_year"] = fiscal_year + if date_from is not UNSET: + field_dict["date_from"] = date_from + if date_to is not UNSET: + field_dict["date_to"] = date_to + if semantic is not UNSET: + field_dict["semantic"] = semantic + if size is not UNSET: + field_dict["size"] = size + if offset is not UNSET: + field_dict["offset"] = offset + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + query = d.pop("query") + + def _parse_entity(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + entity = _parse_entity(d.pop("entity", UNSET)) + + def _parse_form_type(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + form_type = _parse_form_type(d.pop("form_type", UNSET)) + + def _parse_section(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + section = _parse_section(d.pop("section", UNSET)) + + def _parse_element(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + element = _parse_element(d.pop("element", UNSET)) + + def _parse_source_type(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + source_type = _parse_source_type(d.pop("source_type", UNSET)) + + def _parse_fiscal_year(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) 
+ + fiscal_year = _parse_fiscal_year(d.pop("fiscal_year", UNSET)) + + def _parse_date_from(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + date_from = _parse_date_from(d.pop("date_from", UNSET)) + + def _parse_date_to(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + date_to = _parse_date_to(d.pop("date_to", UNSET)) + + semantic = d.pop("semantic", UNSET) + + size = d.pop("size", UNSET) + + offset = d.pop("offset", UNSET) + + search_request = cls( + query=query, + entity=entity, + form_type=form_type, + section=section, + element=element, + source_type=source_type, + fiscal_year=fiscal_year, + date_from=date_from, + date_to=date_to, + semantic=semantic, + size=size, + offset=offset, + ) + + search_request.additional_properties = d + return search_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/search_response.py b/robosystems_client/models/search_response.py new file mode 100644 index 0000000..bed8199 --- /dev/null +++ b/robosystems_client/models/search_response.py @@ -0,0 +1,100 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.search_hit import SearchHit + + +T = TypeVar("T", bound="SearchResponse") + + 
+@_attrs_define +class SearchResponse: + """Response model for document search. + + Attributes: + total (int): + hits (list[SearchHit]): + query (str): + graph_id (str): + """ + + total: int + hits: list[SearchHit] + query: str + graph_id: str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + total = self.total + + hits = [] + for hits_item_data in self.hits: + hits_item = hits_item_data.to_dict() + hits.append(hits_item) + + query = self.query + + graph_id = self.graph_id + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "total": total, + "hits": hits, + "query": query, + "graph_id": graph_id, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.search_hit import SearchHit + + d = dict(src_dict) + total = d.pop("total") + + hits = [] + _hits = d.pop("hits") + for hits_item_data in _hits: + hits_item = SearchHit.from_dict(hits_item_data) + + hits.append(hits_item) + + query = d.pop("query") + + graph_id = d.pop("graph_id") + + search_response = cls( + total=total, + hits=hits, + query=query, + graph_id=graph_id, + ) + + search_response.additional_properties = d + return search_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/trial_balance_row.py b/robosystems_client/models/trial_balance_row.py index d0f056d..d8b2378 100644 --- a/robosystems_client/models/trial_balance_row.py +++ 
b/robosystems_client/models/trial_balance_row.py @@ -1,11 +1,13 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, TypeVar +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field +from ..types import UNSET, Unset + T = TypeVar("T", bound="TrialBalanceRow") @@ -20,6 +22,7 @@ class TrialBalanceRow: total_debits (float): total_credits (float): net_balance (float): + account_type (None | str | Unset): """ account_id: str @@ -29,6 +32,7 @@ class TrialBalanceRow: total_debits: float total_credits: float net_balance: float + account_type: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -46,6 +50,12 @@ def to_dict(self) -> dict[str, Any]: net_balance = self.net_balance + account_type: None | str | Unset + if isinstance(self.account_type, Unset): + account_type = UNSET + else: + account_type = self.account_type + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -59,6 +69,8 @@ def to_dict(self) -> dict[str, Any]: "net_balance": net_balance, } ) + if account_type is not UNSET: + field_dict["account_type"] = account_type return field_dict @@ -79,6 +91,15 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: net_balance = d.pop("net_balance") + def _parse_account_type(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + account_type = _parse_account_type(d.pop("account_type", UNSET)) + trial_balance_row = cls( account_id=account_id, account_code=account_code, @@ -87,6 +108,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: total_debits=total_debits, total_credits=total_credits, net_balance=net_balance, + account_type=account_type, ) trial_balance_row.additional_properties = d From 
b2fe9326529adbdbf80069427fa2cf3ea423f94c Mon Sep 17 00:00:00 2001 From: "Joseph T. French" Date: Sun, 22 Mar 2026 21:03:39 -0500 Subject: [PATCH 3/5] Add DocumentClient to RoboSystemsExtensions class ## Summary This commit enhances the `RoboSystemsExtensions` class by integrating the `DocumentClient`, allowing for improved document management capabilities within the client library. ## Key Accomplishments - **New Client Integration**: Added `DocumentClient` to the `RoboSystemsExtensions` class, enabling access to document-related functionalities. - **Resource Management**: Updated the `close` method to ensure proper resource management for the new `documents` attribute. ## Changes Breakdown - Modified `extensions.py` to include the `DocumentClient` and updated the initialization and cleanup logic accordingly. ## Testing Notes - Verify that the `DocumentClient` is correctly instantiated and that resources are properly released during the `close` operation. ## Infrastructure Considerations - No breaking changes introduced; existing functionality remains intact. --- .../extensions/document_client.py | 364 ++++++++++++++++++ robosystems_client/extensions/extensions.py | 4 + 2 files changed, 368 insertions(+) create mode 100644 robosystems_client/extensions/document_client.py diff --git a/robosystems_client/extensions/document_client.py b/robosystems_client/extensions/document_client.py new file mode 100644 index 0000000..8358eae --- /dev/null +++ b/robosystems_client/extensions/document_client.py @@ -0,0 +1,364 @@ +"""Document Client for RoboSystems API + +Upload, search, list, and delete text documents indexed in OpenSearch. +Documents are sectioned on markdown headings, embedded for semantic search, +and searchable alongside structured graph data. 
+""" + +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Dict, List, Optional + +import httpx +import logging + +logger = logging.getLogger(__name__) + + +@dataclass +class DocumentUploadResult: + """Result from document upload.""" + + document_id: str + sections_indexed: int + total_content_length: int + section_ids: List[str] + success: bool = True + error: Optional[str] = None + + +@dataclass +class DocumentSearchHit: + """A single search result.""" + + document_id: str + score: float + source_type: str + snippet: str + section_label: Optional[str] = None + document_title: Optional[str] = None + entity_ticker: Optional[str] = None + form_type: Optional[str] = None + filing_date: Optional[str] = None + tags: Optional[List[str]] = None + folder: Optional[str] = None + + +@dataclass +class DocumentSearchResult: + """Result from document search.""" + + total: int + hits: List[DocumentSearchHit] + query: str + graph_id: str + + +@dataclass +class DocumentInfo: + """Information about an indexed document.""" + + document_title: str + section_count: int + source_type: str + folder: Optional[str] = None + tags: Optional[List[str]] = None + last_indexed: Optional[str] = None + + +@dataclass +class DocumentListResult: + """Result from listing documents.""" + + total: int + documents: List[DocumentInfo] + graph_id: str + + +class DocumentClient: + """Client for document upload, search, and management.""" + + def __init__(self, config: Dict[str, Any]): + self.config = config + self.base_url = config["base_url"] + self.headers = config.get("headers", {}) + self.timeout = config.get("timeout", 60) + self._http_client = httpx.Client( + timeout=self.timeout, + headers=self.headers, + ) + + def _url(self, graph_id: str, path: str = "") -> str: + return f"{self.base_url}/v1/graphs/{graph_id}/documents{path}" + + def _search_url(self, graph_id: str, path: str = "") -> str: + return f"{self.base_url}/v1/graphs/{graph_id}/search{path}" + + 
def upload( + self, + graph_id: str, + title: str, + content: str, + tags: Optional[List[str]] = None, + folder: Optional[str] = None, + external_id: Optional[str] = None, + ) -> DocumentUploadResult: + """Upload a markdown document for text indexing. + + The document is sectioned on headings, embedded, and indexed + into OpenSearch for full-text and semantic search. + + Args: + graph_id: Target graph ID. + title: Document title. + content: Markdown content (may include YAML frontmatter). + tags: Optional tags for filtering. + folder: Optional folder/category. + external_id: Optional external ID for upsert behavior. + + Returns: + DocumentUploadResult with section IDs and counts. + """ + payload: Dict[str, Any] = {"title": title, "content": content} + if tags is not None: + payload["tags"] = tags + if folder is not None: + payload["folder"] = folder + if external_id is not None: + payload["external_id"] = external_id + + response = self._http_client.post(self._url(graph_id), json=payload) + + if response.status_code == 200: + data = response.json() + return DocumentUploadResult( + document_id=data["document_id"], + sections_indexed=data["sections_indexed"], + total_content_length=data["total_content_length"], + section_ids=data["section_ids"], + ) + else: + error_detail = response.text + try: + error_detail = response.json().get("detail", response.text) + except Exception: + pass + return DocumentUploadResult( + document_id="", + sections_indexed=0, + total_content_length=0, + section_ids=[], + success=False, + error=f"{response.status_code}: {error_detail}", + ) + + def upload_file( + self, + graph_id: str, + file_path: str | Path, + title: Optional[str] = None, + tags: Optional[List[str]] = None, + folder: Optional[str] = None, + external_id: Optional[str] = None, + ) -> DocumentUploadResult: + """Upload a markdown file by path. + + Reads the file, optionally extracts title from filename, + and uploads via the document API. + + Args: + graph_id: Target graph ID. 
+ file_path: Path to markdown file. + title: Document title (defaults to filename). + tags: Optional tags. + folder: Optional folder. + external_id: Optional external ID. + """ + path = Path(file_path) + content = path.read_text() + + if title is None: + # Try frontmatter title, fall back to filename + title = path.stem.replace("-", " ").replace("_", " ").title() + + return self.upload( + graph_id=graph_id, + title=title, + content=content, + tags=tags, + folder=folder, + external_id=external_id, + ) + + def upload_directory( + self, + graph_id: str, + directory: str | Path, + pattern: str = "*.md", + folder: Optional[str] = None, + ) -> List[DocumentUploadResult]: + """Upload all markdown files from a directory. + + Args: + graph_id: Target graph ID. + directory: Path to directory containing markdown files. + pattern: Glob pattern for file matching (default: *.md). + folder: Optional folder to apply to all documents. + + Returns: + List of upload results, one per file. + """ + dir_path = Path(directory) + results = [] + + for md_file in sorted(dir_path.glob(pattern)): + result = self.upload_file( + graph_id=graph_id, + file_path=md_file, + folder=folder, + ) + results.append(result) + + return results + + def search( + self, + graph_id: str, + query: str, + source_type: Optional[str] = None, + entity: Optional[str] = None, + form_type: Optional[str] = None, + section: Optional[str] = None, + semantic: bool = False, + size: int = 10, + ) -> DocumentSearchResult: + """Search documents with full-text or semantic search. + + Args: + graph_id: Target graph ID. + query: Search query string. + source_type: Filter by source type (uploaded_doc, memory, etc.). + entity: Filter by entity ticker/CIK/name. + form_type: Filter by SEC form type. + section: Filter by section ID. + semantic: Enable semantic (vector) search. + size: Max results to return. + + Returns: + DocumentSearchResult with ranked hits. 
+ """ + payload: Dict[str, Any] = {"query": query, "size": size} + if source_type is not None: + payload["source_type"] = source_type + if entity is not None: + payload["entity"] = entity + if form_type is not None: + payload["form_type"] = form_type + if section is not None: + payload["section"] = section + if semantic: + payload["semantic"] = True + + response = self._http_client.post(self._search_url(graph_id), json=payload) + response.raise_for_status() + data = response.json() + + hits = [ + DocumentSearchHit( + document_id=h["document_id"], + score=h["score"], + source_type=h["source_type"], + snippet=h["snippet"], + section_label=h.get("section_label"), + document_title=h.get("document_title"), + entity_ticker=h.get("entity_ticker"), + form_type=h.get("form_type"), + filing_date=h.get("filing_date"), + tags=h.get("tags"), + folder=h.get("folder"), + ) + for h in data.get("hits", []) + ] + + return DocumentSearchResult( + total=data["total"], + hits=hits, + query=data["query"], + graph_id=data["graph_id"], + ) + + def get_section(self, graph_id: str, document_id: str) -> Optional[Dict[str, Any]]: + """Retrieve the full text of a document section by ID. + + Args: + graph_id: Target graph ID. + document_id: Document section ID. + + Returns: + Dict with full section content and metadata, or None if not found. + """ + response = self._http_client.get(self._search_url(graph_id, f"/{document_id}")) + if response.status_code == 404: + return None + response.raise_for_status() + return response.json() + + def list( + self, graph_id: str, source_type: Optional[str] = None + ) -> DocumentListResult: + """List indexed documents for a graph. + + Args: + graph_id: Target graph ID. + source_type: Optional filter by source type. + + Returns: + DocumentListResult with document inventory. 
+ """ + params = {} + if source_type is not None: + params["source_type"] = source_type + + response = self._http_client.get(self._url(graph_id), params=params) + response.raise_for_status() + data = response.json() + + documents = [ + DocumentInfo( + document_title=d["document_title"], + section_count=d["section_count"], + source_type=d["source_type"], + folder=d.get("folder"), + tags=d.get("tags"), + last_indexed=d.get("last_indexed"), + ) + for d in data.get("documents", []) + ] + + return DocumentListResult( + total=data["total"], + documents=documents, + graph_id=data["graph_id"], + ) + + def delete(self, graph_id: str, document_id: str) -> bool: + """Delete a document and all its sections. + + Args: + graph_id: Target graph ID. + document_id: Document ID to delete. + + Returns: + True if deleted, False if not found. + """ + response = self._http_client.delete(self._url(graph_id, f"/{document_id}")) + if response.status_code == 204: + return True + if response.status_code == 404: + return False + response.raise_for_status() + return False + + def close(self): + """Close the HTTP client.""" + self._http_client.close() diff --git a/robosystems_client/extensions/extensions.py b/robosystems_client/extensions/extensions.py index ddf53d9..8fa4eb3 100644 --- a/robosystems_client/extensions/extensions.py +++ b/robosystems_client/extensions/extensions.py @@ -10,6 +10,7 @@ from .agent_client import AgentClient from .operation_client import OperationClient from .file_client import FileClient +from .document_client import DocumentClient from .materialization_client import MaterializationClient from .table_client import TableClient from .graph_client import GraphClient @@ -68,6 +69,7 @@ def __init__(self, config: RoboSystemsExtensionConfig = None): self.files = FileClient(self.config) self.materialization = MaterializationClient(self.config) self.tables = TableClient(self.config) + self.documents = DocumentClient(self.config) self.graphs = GraphClient(self.config) def 
monitor_operation( @@ -104,6 +106,8 @@ def close(self): self.materialization.close() if hasattr(self.tables, "close"): self.tables.close() + if hasattr(self.documents, "close"): + self.documents.close() self.graphs.close() # Convenience methods that delegate to the appropriate clients From 8bf0343a9cb3826afb61f15f2d096b87a636701b Mon Sep 17 00:00:00 2001 From: "Joseph T. French" Date: Sun, 22 Mar 2026 21:52:31 -0500 Subject: [PATCH 4/5] Refactor DocumentClient to use API endpoints for document management ## Summary This commit refactors the `DocumentClient` class to utilize dedicated API endpoints for document upload, search, and retrieval, enhancing the overall functionality and maintainability of the client. ## Key Accomplishments - **API Integration**: Replaced direct HTTP client calls with API functions for document upload, search, and retrieval, improving code clarity and reducing redundancy. - **Data Model Updates**: Changed return types for upload and search methods to use new response models, ensuring consistency with API responses. - **Error Handling**: Improved error handling during document upload to provide clearer feedback on failures. ## Changes Breakdown - Removed outdated dataclass definitions and replaced them with API model imports. - Updated method signatures and return types in `DocumentClient` to align with new API response structures. ## Testing Notes - Verify that document upload and search functionalities work as expected with the new API integration. - Ensure that error handling provides appropriate feedback for failed operations. ## Infrastructure Considerations - Breaking change for direct callers: upload, search, and list methods now return the generated API response models instead of the removed local dataclasses (`DocumentUploadResult`, `DocumentSearchResult`, `DocumentListResult`), and failed operations now raise exceptions rather than returning error-flagged result objects; call sites must be updated accordingly.
--- .../extensions/document_client.py | 283 ++++++------------ 1 file changed, 95 insertions(+), 188 deletions(-) diff --git a/robosystems_client/extensions/document_client.py b/robosystems_client/extensions/document_client.py index 8358eae..52a72a2 100644 --- a/robosystems_client/extensions/document_client.py +++ b/robosystems_client/extensions/document_client.py @@ -5,74 +5,23 @@ and searchable alongside structured graph data. """ -from dataclasses import dataclass, field +from http import HTTPStatus from pathlib import Path from typing import Any, Dict, List, Optional -import httpx -import logging - -logger = logging.getLogger(__name__) - - -@dataclass -class DocumentUploadResult: - """Result from document upload.""" - - document_id: str - sections_indexed: int - total_content_length: int - section_ids: List[str] - success: bool = True - error: Optional[str] = None - - -@dataclass -class DocumentSearchHit: - """A single search result.""" - - document_id: str - score: float - source_type: str - snippet: str - section_label: Optional[str] = None - document_title: Optional[str] = None - entity_ticker: Optional[str] = None - form_type: Optional[str] = None - filing_date: Optional[str] = None - tags: Optional[List[str]] = None - folder: Optional[str] = None - - -@dataclass -class DocumentSearchResult: - """Result from document search.""" - - total: int - hits: List[DocumentSearchHit] - query: str - graph_id: str - - -@dataclass -class DocumentInfo: - """Information about an indexed document.""" - - document_title: str - section_count: int - source_type: str - folder: Optional[str] = None - tags: Optional[List[str]] = None - last_indexed: Optional[str] = None - - -@dataclass -class DocumentListResult: - """Result from listing documents.""" - - total: int - documents: List[DocumentInfo] - graph_id: str +from ..api.documents.delete_document import sync_detailed as delete_document +from ..api.documents.list_documents import sync_detailed as list_documents +from 
..api.documents.upload_document import sync_detailed as upload_document +from ..api.search.get_document_section import sync_detailed as get_document_section +from ..api.search.search_documents import sync_detailed as search_documents +from ..client import AuthenticatedClient +from ..models.document_list_response import DocumentListResponse +from ..models.document_section import DocumentSection +from ..models.document_upload_request import DocumentUploadRequest +from ..models.document_upload_response import DocumentUploadResponse +from ..models.search_request import SearchRequest +from ..models.search_response import SearchResponse +from ..types import UNSET class DocumentClient: @@ -82,18 +31,20 @@ def __init__(self, config: Dict[str, Any]): self.config = config self.base_url = config["base_url"] self.headers = config.get("headers", {}) + self.token = config.get("token") self.timeout = config.get("timeout", 60) - self._http_client = httpx.Client( - timeout=self.timeout, + + def _get_client(self) -> AuthenticatedClient: + if not self.token: + raise Exception("No API key provided. Set X-API-Key in headers.") + return AuthenticatedClient( + base_url=self.base_url, + token=self.token, + prefix="", + auth_header_name="X-API-Key", headers=self.headers, ) - def _url(self, graph_id: str, path: str = "") -> str: - return f"{self.base_url}/v1/graphs/{graph_id}/documents{path}" - - def _search_url(self, graph_id: str, path: str = "") -> str: - return f"{self.base_url}/v1/graphs/{graph_id}/search{path}" - def upload( self, graph_id: str, @@ -102,7 +53,7 @@ def upload( tags: Optional[List[str]] = None, folder: Optional[str] = None, external_id: Optional[str] = None, - ) -> DocumentUploadResult: + ) -> DocumentUploadResponse: """Upload a markdown document for text indexing. The document is sectioned on headings, embedded, and indexed @@ -117,40 +68,23 @@ def upload( external_id: Optional external ID for upsert behavior. 
Returns: - DocumentUploadResult with section IDs and counts. + DocumentUploadResponse with section IDs and counts. """ - payload: Dict[str, Any] = {"title": title, "content": content} - if tags is not None: - payload["tags"] = tags - if folder is not None: - payload["folder"] = folder - if external_id is not None: - payload["external_id"] = external_id - - response = self._http_client.post(self._url(graph_id), json=payload) - - if response.status_code == 200: - data = response.json() - return DocumentUploadResult( - document_id=data["document_id"], - sections_indexed=data["sections_indexed"], - total_content_length=data["total_content_length"], - section_ids=data["section_ids"], - ) - else: - error_detail = response.text - try: - error_detail = response.json().get("detail", response.text) - except Exception: - pass - return DocumentUploadResult( - document_id="", - sections_indexed=0, - total_content_length=0, - section_ids=[], - success=False, - error=f"{response.status_code}: {error_detail}", + body = DocumentUploadRequest( + title=title, + content=content, + tags=tags if tags is not None else UNSET, + folder=folder if folder is not None else UNSET, + external_id=external_id if external_id is not None else UNSET, + ) + + client = self._get_client() + response = upload_document(graph_id=graph_id, client=client, body=body) + if response.status_code != HTTPStatus.OK: + raise Exception( + f"Document upload failed ({response.status_code}): {response.content.decode()}" ) + return response.parsed def upload_file( self, @@ -160,7 +94,7 @@ def upload_file( tags: Optional[List[str]] = None, folder: Optional[str] = None, external_id: Optional[str] = None, - ) -> DocumentUploadResult: + ) -> DocumentUploadResponse: """Upload a markdown file by path. 
Reads the file, optionally extracts title from filename, @@ -178,7 +112,6 @@ def upload_file( content = path.read_text() if title is None: - # Try frontmatter title, fall back to filename title = path.stem.replace("-", " ").replace("_", " ").title() return self.upload( @@ -196,7 +129,7 @@ def upload_directory( directory: str | Path, pattern: str = "*.md", folder: Optional[str] = None, - ) -> List[DocumentUploadResult]: + ) -> List[DocumentUploadResponse]: """Upload all markdown files from a directory. Args: @@ -231,7 +164,7 @@ def search( section: Optional[str] = None, semantic: bool = False, size: int = 10, - ) -> DocumentSearchResult: + ) -> SearchResponse: """Search documents with full-text or semantic search. Args: @@ -245,49 +178,27 @@ def search( size: Max results to return. Returns: - DocumentSearchResult with ranked hits. + SearchResponse with ranked hits. """ - payload: Dict[str, Any] = {"query": query, "size": size} - if source_type is not None: - payload["source_type"] = source_type - if entity is not None: - payload["entity"] = entity - if form_type is not None: - payload["form_type"] = form_type - if section is not None: - payload["section"] = section - if semantic: - payload["semantic"] = True - - response = self._http_client.post(self._search_url(graph_id), json=payload) - response.raise_for_status() - data = response.json() - - hits = [ - DocumentSearchHit( - document_id=h["document_id"], - score=h["score"], - source_type=h["source_type"], - snippet=h["snippet"], - section_label=h.get("section_label"), - document_title=h.get("document_title"), - entity_ticker=h.get("entity_ticker"), - form_type=h.get("form_type"), - filing_date=h.get("filing_date"), - tags=h.get("tags"), - folder=h.get("folder"), - ) - for h in data.get("hits", []) - ] - - return DocumentSearchResult( - total=data["total"], - hits=hits, - query=data["query"], - graph_id=data["graph_id"], + body = SearchRequest( + query=query, + source_type=source_type if source_type is not None else 
UNSET, + entity=entity if entity is not None else UNSET, + form_type=form_type if form_type is not None else UNSET, + section=section if section is not None else UNSET, + semantic=semantic, + size=size, ) - def get_section(self, graph_id: str, document_id: str) -> Optional[Dict[str, Any]]: + client = self._get_client() + response = search_documents(graph_id=graph_id, client=client, body=body) + if response.status_code != HTTPStatus.OK: + raise Exception( + f"Document search failed ({response.status_code}): {response.content.decode()}" + ) + return response.parsed + + def get_section(self, graph_id: str, document_id: str) -> Optional[DocumentSection]: """Retrieve the full text of a document section by ID. Args: @@ -295,17 +206,23 @@ def get_section(self, graph_id: str, document_id: str) -> Optional[Dict[str, Any document_id: Document section ID. Returns: - Dict with full section content and metadata, or None if not found. + DocumentSection with full content and metadata, or None if not found. """ - response = self._http_client.get(self._search_url(graph_id, f"/{document_id}")) - if response.status_code == 404: + client = self._get_client() + response = get_document_section( + graph_id=graph_id, document_id=document_id, client=client + ) + if response.status_code == HTTPStatus.NOT_FOUND: return None - response.raise_for_status() - return response.json() + if response.status_code != HTTPStatus.OK: + raise Exception( + f"Get section failed ({response.status_code}): {response.content.decode()}" + ) + return response.parsed def list( self, graph_id: str, source_type: Optional[str] = None - ) -> DocumentListResult: + ) -> DocumentListResponse: """List indexed documents for a graph. Args: @@ -313,33 +230,19 @@ def list( source_type: Optional filter by source type. Returns: - DocumentListResult with document inventory. + DocumentListResponse with document inventory. 
""" - params = {} - if source_type is not None: - params["source_type"] = source_type - - response = self._http_client.get(self._url(graph_id), params=params) - response.raise_for_status() - data = response.json() - - documents = [ - DocumentInfo( - document_title=d["document_title"], - section_count=d["section_count"], - source_type=d["source_type"], - folder=d.get("folder"), - tags=d.get("tags"), - last_indexed=d.get("last_indexed"), - ) - for d in data.get("documents", []) - ] - - return DocumentListResult( - total=data["total"], - documents=documents, - graph_id=data["graph_id"], + client = self._get_client() + response = list_documents( + graph_id=graph_id, + client=client, + source_type=source_type if source_type is not None else UNSET, ) + if response.status_code != HTTPStatus.OK: + raise Exception( + f"List documents failed ({response.status_code}): {response.content.decode()}" + ) + return response.parsed def delete(self, graph_id: str, document_id: str) -> bool: """Delete a document and all its sections. @@ -351,14 +254,18 @@ def delete(self, graph_id: str, document_id: str) -> bool: Returns: True if deleted, False if not found. """ - response = self._http_client.delete(self._url(graph_id, f"/{document_id}")) - if response.status_code == 204: + client = self._get_client() + response = delete_document( + graph_id=graph_id, document_id=document_id, client=client + ) + if response.status_code == HTTPStatus.NO_CONTENT: return True - if response.status_code == 404: + if response.status_code == HTTPStatus.NOT_FOUND: return False - response.raise_for_status() - return False + raise Exception( + f"Delete document failed ({response.status_code}): {response.content.decode()}" + ) def close(self): - """Close the HTTP client.""" - self._http_client.close() + """Close the client (no-op, AuthenticatedClient is created per-call).""" + pass From 543e56db64e88732f3ec4ecf118cc22673074cf6 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
French" Date: Wed, 25 Mar 2026 20:41:50 -0500 Subject: [PATCH 5/5] Enhance DocumentClient and SearchRequest models with additional filtering options ## Summary This commit updates the `DocumentClient` and `SearchRequest` classes to include new optional parameters for enhanced search capabilities. ## Key Accomplishments - **New Parameters**: Added `element`, `fiscal_year`, `date_from`, and `date_to` parameters to the `DocumentClient` search method, allowing for more granular filtering of search results. - **Documentation Updates**: Improved docstrings for the `DocumentClient` and `SearchRequest` classes to reflect the new parameters and their usage. ## Changes Breakdown - Updated method signatures and docstrings in `document_client.py` to include new filtering options. - Removed the `semantic` parameter from `SearchRequest` as it is no longer utilized. ## Testing Notes - Verify that the new parameters function correctly in search queries and return expected results. - Verify that no remaining call sites pass `semantic`, since that parameter has been removed from both `SearchRequest` and the `DocumentClient.search` signature. ## Infrastructure Considerations - Breaking change: removing `semantic` from the public `DocumentClient.search` signature and from `SearchRequest` will break any caller that still passes `semantic=...`; those callers must be updated before upgrading.
--- .../extensions/document_client.py | 24 +++++++++++++------ robosystems_client/models/search_request.py | 9 ------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/robosystems_client/extensions/document_client.py b/robosystems_client/extensions/document_client.py index 52a72a2..48aeb8b 100644 --- a/robosystems_client/extensions/document_client.py +++ b/robosystems_client/extensions/document_client.py @@ -162,19 +162,26 @@ def search( entity: Optional[str] = None, form_type: Optional[str] = None, section: Optional[str] = None, - semantic: bool = False, + element: Optional[str] = None, + fiscal_year: Optional[int] = None, + date_from: Optional[str] = None, + date_to: Optional[str] = None, size: int = 10, ) -> SearchResponse: - """Search documents with full-text or semantic search. + """Search documents with hybrid (BM25 + KNN) search. Args: graph_id: Target graph ID. query: Search query string. - source_type: Filter by source type (uploaded_doc, memory, etc.). + source_type: Filter by source type (xbrl_textblock, narrative_section, + ixbrl_disclosure, uploaded_doc, memory). entity: Filter by entity ticker/CIK/name. - form_type: Filter by SEC form type. - section: Filter by section ID. - semantic: Enable semantic (vector) search. + form_type: Filter by SEC form type (10-K, 10-Q). + section: Filter by section ID (item_1, item_1a, item_7, etc.). + element: Filter by XBRL element qname (e.g., us-gaap:Goodwill). + fiscal_year: Filter by fiscal year. + date_from: Filter filings on or after date (YYYY-MM-DD). + date_to: Filter filings on or before date (YYYY-MM-DD). size: Max results to return. 
Returns: @@ -186,7 +193,10 @@ def search( entity=entity if entity is not None else UNSET, form_type=form_type if form_type is not None else UNSET, section=section if section is not None else UNSET, - semantic=semantic, + element=element if element is not None else UNSET, + fiscal_year=fiscal_year if fiscal_year is not None else UNSET, + date_from=date_from if date_from is not None else UNSET, + date_to=date_to if date_to is not None else UNSET, size=size, ) diff --git a/robosystems_client/models/search_request.py b/robosystems_client/models/search_request.py index 508beca..a2abda9 100644 --- a/robosystems_client/models/search_request.py +++ b/robosystems_client/models/search_request.py @@ -26,7 +26,6 @@ class SearchRequest: fiscal_year (int | None | Unset): Filter by fiscal year date_from (None | str | Unset): Filter filings on or after date (YYYY-MM-DD) date_to (None | str | Unset): Filter filings on or before date (YYYY-MM-DD) - semantic (bool | Unset): Enable semantic (vector) search if available Default: False. size (int | Unset): Max results to return Default: 10. offset (int | Unset): Pagination offset Default: 0. 
""" @@ -40,7 +39,6 @@ class SearchRequest: fiscal_year: int | None | Unset = UNSET date_from: None | str | Unset = UNSET date_to: None | str | Unset = UNSET - semantic: bool | Unset = False size: int | Unset = 10 offset: int | Unset = 0 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -96,8 +94,6 @@ def to_dict(self) -> dict[str, Any]: else: date_to = self.date_to - semantic = self.semantic - size = self.size offset = self.offset @@ -125,8 +121,6 @@ def to_dict(self) -> dict[str, Any]: field_dict["date_from"] = date_from if date_to is not UNSET: field_dict["date_to"] = date_to - if semantic is not UNSET: - field_dict["semantic"] = semantic if size is not UNSET: field_dict["size"] = size if offset is not UNSET: @@ -211,8 +205,6 @@ def _parse_date_to(data: object) -> None | str | Unset: date_to = _parse_date_to(d.pop("date_to", UNSET)) - semantic = d.pop("semantic", UNSET) - size = d.pop("size", UNSET) offset = d.pop("offset", UNSET) @@ -227,7 +219,6 @@ def _parse_date_to(data: object) -> None | str | Unset: fiscal_year=fiscal_year, date_from=date_from, date_to=date_to, - semantic=semantic, size=size, offset=offset, )