From ffae9bc032f2d522837b1e7d76d6046f737c6310 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 30 Jan 2025 17:37:14 +0000 Subject: [PATCH 01/37] feat: Added read_time as a parameter to various calls (synchronous/base classes) --- google/cloud/firestore_v1/aggregation.py | 24 +- google/cloud/firestore_v1/base_aggregation.py | 13 + google/cloud/firestore_v1/base_client.py | 13 +- google/cloud/firestore_v1/base_collection.py | 8 + google/cloud/firestore_v1/base_document.py | 14 +- google/cloud/firestore_v1/base_query.py | 13 + google/cloud/firestore_v1/base_transaction.py | 4 + google/cloud/firestore_v1/client.py | 25 +- google/cloud/firestore_v1/collection.py | 23 +- google/cloud/firestore_v1/document.py | 18 +- google/cloud/firestore_v1/query.py | 31 ++- google/cloud/firestore_v1/transaction.py | 16 ++ tests/system/test_system.py | 228 ++++++++++++++++++ tests/unit/v1/test_aggregation.py | 41 +++- tests/unit/v1/test_client.py | 26 +- tests/unit/v1/test_collection.py | 16 +- tests/unit/v1/test_cross_language.py | 1 + tests/unit/v1/test_document.py | 41 +++- tests/unit/v1/test_query.py | 108 ++++++++- tests/unit/v1/test_transaction.py | 21 +- 20 files changed, 631 insertions(+), 53 deletions(-) diff --git a/google/cloud/firestore_v1/aggregation.py b/google/cloud/firestore_v1/aggregation.py index ec0fbc1894..7f35ffadfc 100644 --- a/google/cloud/firestore_v1/aggregation.py +++ b/google/cloud/firestore_v1/aggregation.py @@ -20,6 +20,7 @@ """ from __future__ import annotations +from datetime import datetime from typing import TYPE_CHECKING, Any, Generator, List, Optional, Union from google.api_core import exceptions, gapic_v1 @@ -56,6 +57,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> QueryResultsList[AggregationResult]: """Runs the aggregation query. @@ -78,6 +80,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, + or if Point-in-Time Recovery is enabled, can additionally be a whole minute + timestamp within the past 7 days. Returns: QueryResultsList[AggregationResult]: The aggregation query results. @@ -90,6 +96,7 @@ def get( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) result_list = list(result) @@ -100,13 +107,14 @@ def get( return QueryResultsList(result_list, explain_options, explain_metrics) - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None): + def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): """Helper method for :meth:`stream`.""" request, kwargs = self._prep_stream( transaction, retry, timeout, explain_options, + read_time, ) return self._client._firestore_api.run_aggregation_query( @@ -132,6 +140,7 @@ def _make_stream( retry: Union[retries.Retry, None, object] = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> Generator[List[AggregationResult], Any, Optional[ExplainMetrics]]: """Internal method for stream(). Runs the aggregation query. 
@@ -155,6 +164,10 @@ def _make_stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, + or if Point-in-Time Recovery is enabled, can additionally be a whole minute + timestamp within the past 7 days. Yields: List[AggregationResult]: @@ -172,6 +185,7 @@ def _make_stream( retry, timeout, explain_options, + read_time, ) while True: try: @@ -182,6 +196,8 @@ def _make_stream( transaction, retry, timeout, + explain_options, + read_time, ) continue else: @@ -206,6 +222,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> StreamGenerator[List[AggregationResult]]: """Runs the aggregation query. @@ -229,6 +246,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, + or if Point-in-Time Recovery is enabled, can additionally be a whole minute + timestamp within the past 7 days. Returns: `StreamGenerator[List[AggregationResult]]`: @@ -239,5 +260,6 @@ def stream( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) return StreamGenerator(inner_generator, explain_options) diff --git a/google/cloud/firestore_v1/base_aggregation.py b/google/cloud/firestore_v1/base_aggregation.py index 34a3baad81..06dee8ac73 100644 --- a/google/cloud/firestore_v1/base_aggregation.py +++ b/google/cloud/firestore_v1/base_aggregation.py @@ -22,6 +22,7 @@ import abc from abc import ABC +from datetime import datetime from typing import TYPE_CHECKING, Any, Coroutine, List, Optional, Tuple, Union from google.api_core import gapic_v1 @@ -205,12 +206,14 @@ def _prep_stream( retry: Union[retries.Retry, retries.AsyncRetry, None, object] = None, timeout: float | None = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> Tuple[dict, dict]: parent_path, expected_prefix = self._collection_ref._parent_info() request = { "parent": parent_path, "structured_aggregation_query": self._to_protobuf(), "transaction": _helpers.get_transaction_id(transaction), + "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() @@ -228,6 +231,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> ( QueryResultsList[AggregationResult] | Coroutine[Any, Any, List[List[AggregationResult]]] @@ -253,6 +257,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, + or if Point-in-Time Recovery is enabled, can additionally be a whole minute + timestamp within the past 7 days. 
Returns: (QueryResultsList[List[AggregationResult]] | Coroutine[Any, Any, List[List[AggregationResult]]]): @@ -270,6 +278,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> ( StreamGenerator[List[AggregationResult]] | AsyncStreamGenerator[List[AggregationResult]] @@ -291,6 +300,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, + or if Point-in-Time Recovery is enabled, can additionally be a whole minute + timestamp within the past 7 days. Returns: StreamGenerator[List[AggregationResult]] | AsyncStreamGenerator[List[AggregationResult]]: diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index 9b1c0bccd4..bda8917b17 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -26,6 +26,7 @@ from __future__ import annotations import os +from datetime import datetime from typing import ( Any, AsyncGenerator, @@ -437,6 +438,7 @@ def _prep_get_all( transaction: BaseTransaction | None = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + read_time: datetime | None = None, ) -> Tuple[dict, dict, dict]: """Shared setup for async/sync :meth:`get_all`.""" document_paths, reference_map = _reference_info(references) @@ -446,6 +448,7 @@ def _prep_get_all( "documents": document_paths, "mask": mask, "transaction": _helpers.get_transaction_id(transaction), + "read_time": read_time, } kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) @@ -458,6 +461,8 @@ def get_all( transaction=None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + *, + read_time: datetime | None = None ) -> Union[ AsyncGenerator[DocumentSnapshot, Any], Generator[DocumentSnapshot, Any, Any] ]: @@ -467,9 +472,13 @@ def _prep_collections( self, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + read_time: datetime | None = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" - request = {"parent": "{}/documents".format(self._database_string)} + request = { + "parent": "{}/documents".format(self._database_string), + "read_time": read_time, + } kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -478,6 +487,8 @@ def collections( self, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + *, + read_time: datetime | None = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py index b74ced2a38..3c80b16adc 100644 --- a/google/cloud/firestore_v1/base_collection.py +++ b/google/cloud/firestore_v1/base_collection.py @@ -16,6 +16,8 @@ from __future__ import annotations import random + +from datetime import datetime from typing import ( TYPE_CHECKING, Any, @@ -202,6 +204,7 @@ def _prep_list_documents( page_size: Optional[int] = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, + read_time: Optional[datetime] = None, ) -> Tuple[dict, dict]: 
"""Shared setup for async / sync :method:`list_documents`""" parent, _ = self._parent_info() @@ -214,6 +217,7 @@ def _prep_list_documents( # include any fields. To save on data transfer, we can set a field_path mask # to include no fields "mask": {"field_paths": None}, + "read_time": read_time, } kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) @@ -224,6 +228,8 @@ def list_documents( page_size: Optional[int] = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, + *, + read_time: Optional[datetime] = None, ) -> Union[ Generator[DocumentReference, Any, Any], AsyncGenerator[DocumentReference, Any] ]: @@ -497,6 +503,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> ( QueryResultsList[DocumentSnapshot] | Coroutine[Any, Any, QueryResultsList[DocumentSnapshot]] @@ -510,6 +517,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> StreamGenerator[DocumentSnapshot] | AsyncIterator[DocumentSnapshot]: raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index b16b8abace..ccbe5abf72 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -26,6 +26,7 @@ Union, Awaitable, ) +from datetime import datetime from google.api_core import retry as retries @@ -290,6 +291,7 @@ def _prep_batch_get( transaction=None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + read_time: datetime | None = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`get`.""" if isinstance(field_paths, str): @@ -305,6 +307,7 @@ def _prep_batch_get( "documents": [self._document_path], "mask": mask, "transaction": _helpers.get_transaction_id(transaction), + "read_time": read_time, } kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) @@ -316,6 +319,8 @@ def get( transaction=None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + *, + read_time: Optional[datetime] = None, ) -> "DocumentSnapshot" | Awaitable["DocumentSnapshot"]: raise NotImplementedError @@ -324,9 +329,14 @@ def _prep_collections( page_size: int | None = None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + read_time: datetime | None = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" - request = {"parent": self._document_path, "page_size": page_size} + request = { + "parent": self._document_path, + "page_size": page_size, + "read_time": read_time, + } kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -336,6 +346,8 @@ def collections( page_size: int | None = None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + *, + read_time: Optional[datetime] = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 5a9efaf783..0838db5267 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -24,6 +24,7 @@ import copy import math import warnings +from datetime import datetime from typing import ( TYPE_CHECKING, Any, @@ -1031,6 +1032,7 @@ def get( timeout: Optional[float] = None, *, explain_options: 
Optional[ExplainOptions] = None,
+        read_time: Optional[datetime] = None,
     ) -> (
         QueryResultsList[DocumentSnapshot]
         | Coroutine[Any, Any, QueryResultsList[DocumentSnapshot]]
@@ -1043,6 +1045,7 @@ def _prep_stream(
         retry: retries.Retry | retries.AsyncRetry | object | None = None,
         timeout: Optional[float] = None,
         explain_options: Optional[ExplainOptions] = None,
+        read_time: Optional[datetime] = None,
     ) -> Tuple[dict, str, dict]:
         """Shared setup for async / sync :meth:`stream`"""
         if self._limit_to_last:
@@ -1056,6 +1059,7 @@
             "parent": parent_path,
             "structured_query": self._to_protobuf(),
             "transaction": _helpers.get_transaction_id(transaction),
+            "read_time": read_time,
         }
         if explain_options is not None:
             request["explain_options"] = explain_options._to_dict()
@@ -1070,6 +1074,7 @@ def stream(
         timeout: Optional[float] = None,
         *,
         explain_options: Optional[ExplainOptions] = None,
+        read_time: Optional[datetime] = None,
     ) -> (
         StreamGenerator[document.DocumentSnapshot]
         | AsyncStreamGenerator[DocumentSnapshot]
@@ -1426,6 +1431,7 @@ def _prep_get_partitions(
         partition_count,
         retry: retries.Retry | object | None = None,
         timeout: float | None = None,
+        read_time: datetime | None = None,
     ) -> Tuple[dict, dict]:
         self._validate_partition_query()
         parent_path, expected_prefix = self._parent._parent_info()
@@ -1441,6 +1447,7 @@
             "parent": parent_path,
             "structured_query": query._to_protobuf(),
             "partition_count": partition_count,
+            "read_time": read_time,
         }
         kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
 
@@ -1451,9 +1458,15 @@ def get_partitions(
         partition_count,
         retry: Optional[retries.Retry] = None,
         timeout: Optional[float] = None,
+        *,
+        read_time: Optional[datetime] = None,
    ):
         raise NotImplementedError
 
+    @staticmethod
+    def _get_collection_reference_class() -> Type["BaseCollectionReference"]:
+        raise NotImplementedError
+
 
 class QueryPartition:
     """Represents a bounded partition of a collection group query.
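A minimal usage sketch of the surface wired through above (illustrative only, not part of the patch; the database contents, collection, document, and field names are hypothetical, and the timestamp must fall inside the allowed window):

    from datetime import datetime, timedelta, timezone

    from google.cloud import firestore

    client = firestore.Client()

    # Without Point-in-Time Recovery the timestamp must be microsecond-precision
    # and at most one hour old; with PITR enabled, a whole-minute timestamp up
    # to 7 days old is also accepted.
    read_time = datetime.now(tz=timezone.utc) - timedelta(minutes=30)

    # Point-in-time document read ("cities"/"SF" is a hypothetical path).
    snapshot = client.document("cities", "SF").get(read_time=read_time)

    # The same timestamp works for queries, streams, and aggregations.
    query = client.collection("cities").where(
        filter=firestore.FieldFilter("population", ">", 1000)
    )
    for doc in query.stream(read_time=read_time):
        print(doc.id, doc.to_dict())

    count_at_read_time = query.count(alias="all").get(read_time=read_time)

Because every public entry point forwards the value into the request dict built by the `_prep_*` helpers, one timestamp pins a whole sequence of reads to the same database state.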
diff --git a/google/cloud/firestore_v1/base_transaction.py b/google/cloud/firestore_v1/base_transaction.py
index 92e54c81c4..2e21ead37c 100644
--- a/google/cloud/firestore_v1/base_transaction.py
+++ b/google/cloud/firestore_v1/base_transaction.py
@@ -15,6 +15,7 @@
 """Helpers for applying Google Cloud Firestore changes in a transaction."""
 from __future__ import annotations
 
+from datetime import datetime
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -148,6 +149,8 @@ def get_all(
         references: list,
         retry: retries.Retry | retries.AsyncRetry | object | None = None,
         timeout: float | None = None,
+        *,
+        read_time: Optional[datetime] = None,
     ) -> (
         Generator[DocumentSnapshot, Any, None]
         | Coroutine[Any, Any, AsyncGenerator[DocumentSnapshot, Any]]
@@ -161,6 +164,7 @@ def get(
         timeout: float | None = None,
         *,
         explain_options: Optional[ExplainOptions] = None,
+        read_time: Optional[datetime] = None,
     ) -> (
         StreamGenerator[DocumentSnapshot]
         | Generator[DocumentSnapshot, Any, None]
diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py
index 23c6b36ef2..5de5dc6aec 100644
--- a/google/cloud/firestore_v1/client.py
+++ b/google/cloud/firestore_v1/client.py
@@ -25,6 +25,7 @@
 """
 from __future__ import annotations
 
+from datetime import datetime
 from typing import TYPE_CHECKING, Any, Generator, Iterable, List, Optional, Union
 
 from google.api_core import gapic_v1
@@ -201,10 +202,12 @@ def document(self, *document_path: str) -> DocumentReference:
     def get_all(
         self,
         references: list,
         field_paths: Iterable[str] | None = None,
         transaction: Transaction | None = None,
         retry: retries.Retry | object | None = gapic_v1.method.DEFAULT,
         timeout: float | None = None,
+        *,
+        read_time: Optional[datetime] = None,
     ) -> Generator[DocumentSnapshot, Any, None]:
         """Retrieve a batch of documents.
 
@@ -239,13 +242,17 @@ def get_all(
                 should be retried.  Defaults to a system-specified policy.
             timeout (float): The timeout for this request.  Defaults to a
                 system-specified value.
+            read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given
+                time. This must be a microsecond precision timestamp within the past one hour, or
+                if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp
+                within the past 7 days. For the most accurate results, use UTC timezone.
 
         Yields:
             .DocumentSnapshot: The next document snapshot that fulfills the
             query, or :data:`None` if the document does not exist.
         """
         request, reference_map, kwargs = self._prep_get_all(
-            references, field_paths, transaction, retry, timeout
+            references, field_paths, transaction, retry, timeout, read_time
         )
 
         response_iterator = self._firestore_api.batch_get_documents(
@@ -260,7 +267,9 @@ def get_all(
     def collections(
         self,
         retry: retries.Retry | object | None = gapic_v1.method.DEFAULT,
         timeout: float | None = None,
+        *,
+        read_time: Optional[datetime] = None,
     ) -> Generator[Any, Any, None]:
         """List top-level collections of the client's database.
 
         Args:
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.  Defaults to a system-specified policy.
             timeout (float): The timeout for this request.  Defaults to a
                 system-specified value.
+            read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given
+                time. 
This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]: iterator of subcollections of the current document. """ - request, kwargs = self._prep_collections(retry, timeout) + request, kwargs = self._prep_collections(retry, timeout, read_time) iterator = self._firestore_api.list_collection_ids( request=request, diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py index cd6929b688..1f29392bb8 100644 --- a/google/cloud/firestore_v1/collection.py +++ b/google/cloud/firestore_v1/collection.py @@ -15,6 +15,7 @@ """Classes for representing collections for the Google Cloud Firestore API.""" from __future__ import annotations +from datetime import datetime from typing import TYPE_CHECKING, Any, Callable, Generator, Optional, Tuple, Union from google.api_core import gapic_v1 @@ -137,6 +138,8 @@ def list_documents( page_size: Union[int, None] = None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: Union[float, None] = None, + *, + read_time: Optional[datetime] = None, ) -> Generator[Any, Any, None]: """List all subdocuments of the current collection. @@ -148,6 +151,10 @@ def list_documents( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.DocumentReference`]: @@ -155,7 +162,7 @@ def list_documents( collection does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_list_documents(page_size, retry, timeout) + request, kwargs = self._prep_list_documents(page_size, retry, timeout, read_time) iterator = self._client._firestore_api.list_documents( request=request, @@ -174,6 +181,7 @@ def get( timeout: Union[float, None] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in this collection. @@ -192,6 +200,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. If a ``transaction`` is used and it already has write operations added, this method cannot be used (i.e. 
read-after-write is not @@ -204,6 +216,8 @@ def get( query, kwargs = self._prep_get_or_stream(retry, timeout) if explain_options is not None: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time return query.get(transaction=transaction, **kwargs) @@ -214,6 +228,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> StreamGenerator[DocumentSnapshot]: """Read the documents in this collection. @@ -245,6 +260,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: `StreamGenerator[DocumentSnapshot]`: A generator of the query results. @@ -252,6 +271,8 @@ def stream( query, kwargs = self._prep_get_or_stream(retry, timeout) if explain_options: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time return query.stream(transaction=transaction, **kwargs) diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index 0c7d7872fd..8f9e58ad77 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -16,7 +16,7 @@ from __future__ import annotations import datetime import logging -from typing import Any, Callable, Generator, Iterable +from typing import Any, Callable, Generator, Iterable, Optional from google.api_core import gapic_v1 from google.api_core import retry as retries @@ -365,6 +365,8 @@ def get( transaction=None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> DocumentSnapshot: """Retrieve a snapshot of the current document. @@ -387,6 +389,10 @@ def get( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: :class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot`: @@ -398,7 +404,7 @@ def get( """ from google.cloud.firestore_v1.base_client import _parse_batch_get - request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout) + request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout, read_time) response_iter = self._client._firestore_api.batch_get_documents( request=request, @@ -434,6 +440,8 @@ def collections( page_size: int | None = None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Generator[Any, Any, None]: """List subcollections of the current document. @@ -445,6 +453,10 @@ def collections( should be retried. 
Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]: @@ -452,7 +464,7 @@ def collections( document does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_collections(page_size, retry, timeout) + request, kwargs = self._prep_collections(page_size, retry, timeout, read_time) iterator = self._client._firestore_api.list_collection_ids( request=request, diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index 0b52afc834..d0a8af272e 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -20,6 +20,7 @@ """ from __future__ import annotations +from datetime import datetime from typing import TYPE_CHECKING, Any, Callable, Generator, List, Optional, Type from google.api_core import exceptions, gapic_v1 @@ -141,6 +142,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -162,6 +164,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: QueryResultsList[DocumentSnapshot]: The documents in the collection @@ -188,6 +194,7 @@ def get( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) result_list = list(result) if is_limited_to_last: @@ -238,13 +245,14 @@ def _chunkify( ): return - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None): + def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): """Helper method for :meth:`stream`.""" request, expected_prefix, kwargs = self._prep_stream( transaction, retry, timeout, explain_options, + read_time, ) response_iterator = self._client._firestore_api.run_query( @@ -353,6 +361,7 @@ def _make_stream( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> Generator[DocumentSnapshot, Any, Optional[ExplainMetrics]]: """Internal method for stream(). Read the documents in the collection that match this query. @@ -386,6 +395,10 @@ def _make_stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. 
This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Yields: DocumentSnapshot: @@ -402,6 +415,7 @@ def _make_stream( retry, timeout, explain_options, + read_time, ) last_snapshot = None @@ -416,6 +430,7 @@ def _make_stream( transaction, retry, timeout, + read_time=read_time, ) continue else: @@ -448,6 +463,7 @@ def stream( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> StreamGenerator[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -479,6 +495,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: `StreamGenerator[DocumentSnapshot]`: A generator of the query results. @@ -488,6 +508,7 @@ def stream( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) return StreamGenerator(inner_generator, explain_options) @@ -580,6 +601,8 @@ def get_partitions( partition_count, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime] = None ) -> Generator[QueryPartition, None, None]: """Partition a query for parallelization. @@ -595,8 +618,12 @@ def get_partitions( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. """ - request, kwargs = self._prep_get_partitions(partition_count, retry, timeout) + request, kwargs = self._prep_get_partitions(partition_count, retry, timeout, read_time) pager = self._client._firestore_api.partition_query( request=request, diff --git a/google/cloud/firestore_v1/transaction.py b/google/cloud/firestore_v1/transaction.py index 37afd5fb00..05b84daa0a 100644 --- a/google/cloud/firestore_v1/transaction.py +++ b/google/cloud/firestore_v1/transaction.py @@ -15,6 +15,7 @@ """Helpers for applying Google Cloud Firestore changes in a transaction.""" from __future__ import annotations +from datetime import datetime from typing import TYPE_CHECKING, Any, Callable, Generator, Optional from google.api_core import exceptions, gapic_v1 @@ -154,6 +155,8 @@ def get_all( references: list, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime] = None, ) -> Generator[DocumentSnapshot, Any, None]: """Retrieves multiple documents from Firestore. @@ -164,12 +167,18 @@ def get_all( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. 
Defaults to a system-specified value. + read_time (Optional[datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, + or if Point-in-Time Recovery is enabled, can additionally be a whole minute + timestamp within the past 7 days. Yields: .DocumentSnapshot: The next document snapshot that fulfills the query, or :data:`None` if the document does not exist. """ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + if read_time is not None: + kwargs["read_time"] = read_time return self._client.get_all(references, transaction=self, **kwargs) def get( @@ -179,6 +188,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime] = None, ) -> StreamGenerator[DocumentSnapshot] | Generator[DocumentSnapshot, Any, None]: """Retrieve a document or a query result from the database. @@ -194,6 +204,10 @@ def get( Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. Can only be used when running a query, not a document reference. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Yields: .DocumentSnapshot: The next document snapshot that fulfills the @@ -205,6 +219,8 @@ def get( reference. """ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + if read_time is not None: + kwargs["read_time"] = read_time if isinstance(ref_or_query, DocumentReference): if explain_options is not None: raise ValueError( diff --git a/tests/system/test_system.py b/tests/system/test_system.py index b8adebb6b6..5d4cda4e89 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -217,6 +217,42 @@ def test_collection_stream_or_get_w_explain_options_analyze_true( assert len(execution_stats.debug_stats) > 0 +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_collections_w_read_time(client, cleanup, database): + first_collection_id = "doc-create" + UNIQUE_RESOURCE_ID + first_document_id = "doc" + UNIQUE_RESOURCE_ID + first_document = client.document(first_collection_id, first_document_id) + # Add to clean-up before API request (in case ``create()`` fails). + cleanup(first_document.delete) + + data = { + "status": "new" + } + write_result = first_document.create(data) + read_time = write_result.update_time + num_collections = len(list(client.collections())) + + second_collection_id = "doc-create" + UNIQUE_RESOURCE_ID + "-2" + second_document_id = "doc" + UNIQUE_RESOURCE_ID + "-2" + second_document = client.document(second_collection_id, second_document_id) + cleanup(second_document.delete) + second_document.create(data) + + # We're just testing that we added one collection at read_time, not two. + collections = list(client.collections(read_time=read_time)) + assert len(collections) == num_collections + ids = [collection.id for collection in collections] + assert second_collection_id not in ids + assert first_collection_id in ids + + # Test that listing current collections does have the second id. 
+    curr_collections = list(client.collections())
+    assert len(curr_collections) == num_collections + 1
+    ids = [collection.id for collection in curr_collections]
+    assert second_collection_id in ids
+    assert first_collection_id in ids
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_create_document(client, cleanup, database):
     now = datetime.datetime.now(tz=datetime.timezone.utc)
@@ -946,6 +982,32 @@ def test_document_get(client, cleanup, database):
     check_snapshot(snapshot, document, data, write_result)
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_document_get_w_read_time(client, cleanup, database):
+    now = datetime.datetime.now(tz=datetime.timezone.utc)
+    document_id = "for-get" + UNIQUE_RESOURCE_ID
+    document = client.document("created", document_id)
+    # Add to clean-up before API request (in case ``create()`` fails).
+    cleanup(document.delete)
+
+    # First make sure it doesn't exist.
+    assert not document.get().exists
+
+    initial_data = {
+        "test": "success"
+    }
+    write_result = document.create(initial_data)
+    read_time = write_result.update_time
+
+    new_data = {
+        "test": "changed"
+    }
+    document.update(new_data)
+
+    snapshot = document.get(read_time=read_time)
+    check_snapshot(snapshot, document, initial_data, write_result)
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_document_delete(client, cleanup, database):
     document_id = "deleted" + UNIQUE_RESOURCE_ID
@@ -1072,6 +1134,31 @@ def test_collection_add(client, cleanup, database):
     assert set(collection3.list_documents()) == {document_ref5}
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_list_documents_with_read_time(client, cleanup, database):
+    # TODO(microgen): list_documents is returning a generator, not a list.
+    # Consider if this is desired. Also, Document isn't hashable.
+    collection_id = "coll-add" + UNIQUE_RESOURCE_ID
+    collection = client.collection(collection_id)
+
+    assert set(collection.list_documents()) == set()
+
+    data1 = {"foo": "bar"}
+    update_time1, document_ref1 = collection.add(data1)
+    cleanup(document_ref1.delete)
+    assert set(collection.list_documents()) == {document_ref1}
+
+    data2 = {"bar": "baz"}
+    update_time2, document_ref2 = collection.add(data2)
+    cleanup(document_ref2.delete)
+    assert set(collection.list_documents()) == {document_ref1, document_ref2}
+    assert set(collection.list_documents(read_time=update_time1)) == {document_ref1}
+    assert set(collection.list_documents(read_time=update_time2)) == {
+        document_ref1,
+        document_ref2,
+    }
+
+
 @pytest.fixture
 def query_docs(client, database):
     collection_id = "qs" + UNIQUE_RESOURCE_ID
@@ -1450,6 +1537,41 @@ def test_query_stream_or_get_w_explain_options_analyze_false(
     explain_metrics.execution_stats
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_query_stream_w_read_time(query_docs, cleanup, database):
+    collection, stored, allowed_vals = query_docs
+    num_vals = len(allowed_vals)
+
+    # Find the most recent read_time in the collection.
+    read_time = max(docref.get().read_time for docref in collection.list_documents())
+    new_data = {
+        "a": 9000,
+        "b": 1,
+        "c": [10000, 1000],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+    _, new_ref = collection.add(new_data)
+    # Add to clean-up.
+    cleanup(new_ref.delete)
+    stored[new_ref.id] = new_data
+
+    # Compare query at read_time to query at current time.
+    query = collection.where(filter=FieldFilter("b", "==", 1))
+    values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream(read_time=read_time)}
+    assert len(values) == num_vals
+    assert new_ref.id not in values
+    for key, value in values.items():
+        assert stored[key] == value
+        assert value["b"] == 1
+        assert value["a"] != 9000
+        assert key != new_ref.id
+
+    new_values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
+    assert len(new_values) == num_vals + 1
+    assert new_ref.id in new_values
+    assert new_values[new_ref.id] == new_data
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_query_with_order_dot_key(client, cleanup, database):
     db = client
@@ -1760,6 +1882,7 @@ def test_get_all(client, cleanup, database):
     document3 = client.document(collection_name, "c")
     # Add to clean-up before API requests (in case ``create()`` fails).
     cleanup(document1.delete)
+    cleanup(document2.delete)
     cleanup(document3.delete)
 
     data1 = {"a": {"b": 2, "c": 3}, "d": 4, "e": 0}
@@ -1767,6 +1890,8 @@ def test_get_all(client, cleanup, database):
     data3 = {"a": {"b": 5, "c": 6}, "d": 7, "e": 100}
     write_result3 = document3.create(data3)
 
+    read_time = write_result3.update_time
+
     # 0. Get 3 unique documents, one of which is missing.
     snapshots = list(client.get_all([document1, document2, document3]))
 
@@ -1802,6 +1927,26 @@ def test_get_all(client, cleanup, database):
     restricted3 = {"a": {"b": data3["a"]["b"]}, "d": data3["d"]}
     check_snapshot(snapshot3, document3, restricted3, write_result3)
 
+    # 3. Use ``read_time`` in ``get_all``
+    new_data = {"a": {"b": 8, "c": 9}, "d": 10, "e": 1010}
+    document1.update(new_data)
+    document2.create(new_data)
+    document3.update(new_data)
+
+
+    snapshots = list(client.get_all([document1, document2, document3], read_time=read_time))
+    assert snapshots[0].exists
+    assert snapshots[1].exists
+    assert not snapshots[2].exists
+
+    snapshots = [snapshot for snapshot in snapshots if snapshot.exists]
+    id_attr = operator.attrgetter("id")
+    snapshots.sort(key=id_attr)
+
+    snapshot1, snapshot3 = snapshots
+    check_snapshot(snapshot1, document1, data1, write_result1)
+    check_snapshot(snapshot3, document3, data3, write_result3)
+
 
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_batch(client, cleanup, database):
@@ -2977,6 +3122,45 @@ def test_aggregation_query_stream_or_get_w_explain_options_analyze_false(
     explain_metrics.execution_stats
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+@pytest.mark.parametrize("aggregation_type,expected_value",
+    [("count", 5), ("sum", 100), ("avg", 4.0)])
+def test_aggregation_queries_with_read_time(collection, query, cleanup, database, aggregation_type, expected_value):
+    """
+    Ensure that all aggregation queries work when read_time is passed into
+    an aggregation query's ``get()`` method.
+    """
+    # Find the most recent read_time in the collection.
+    read_time = max(docref.get().read_time for docref in collection.list_documents())
+    document_data = {
+        "a": 1,
+        "b": 9000,
+        "c": [1, 123123123],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+
+    _, doc_ref = collection.add(document_data)
+    cleanup(doc_ref.delete)
+
+    if aggregation_type == "count":
+        aggregation_query = query.count()
+    elif aggregation_type == "sum":
+        aggregation_query = collection.sum("stats.product")
+    elif aggregation_type == "avg":
+        aggregation_query = collection.avg("stats.product")
+
+    # Check that adding the new document data affected the results of the aggregation queries.
+    new_result = aggregation_query.get()
+    assert len(new_result) == 1
+    for r in new_result[0]:
+        assert r.value != expected_value
+
+    old_result = aggregation_query.get(read_time=read_time)
+    assert len(old_result) == 1
+    for r in old_result[0]:
+        assert r.value == expected_value
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_query_with_and_composite_filter(collection, database):
     and_filter = And(
@@ -3206,6 +3390,50 @@ def in_transaction(transaction):
     assert inner_fn_ran is True
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_query_in_transaction_with_read_time(client, cleanup, database):
+    """
+    Test querying at a past read_time inside transactions.
+    """
+    collection_id = "doc-create" + UNIQUE_RESOURCE_ID
+    doc_ids = [f"doc{i}" + UNIQUE_RESOURCE_ID for i in range(5)]
+    doc_refs = [client.document(collection_id, doc_id) for doc_id in doc_ids]
+    for doc_ref in doc_refs:
+        cleanup(doc_ref.delete)
+    doc_refs[0].create({"a": 1, "b": 2})
+    doc_refs[1].create({"a": 1, "b": 1})
+
+    read_time = max(docref.get().read_time for docref in doc_refs)
+    doc_refs[2].create({"a": 1, "b": 3})
+
+    collection = client.collection(collection_id)
+    query = collection.where(filter=FieldFilter("a", "==", 1))
+
+    with client.transaction() as transaction:
+        # should work when transaction is initiated through transactional decorator
+        @firestore.transactional
+        def in_transaction(transaction):
+            global inner_fn_ran
+
+            new_b_values = [docs.get("b") for docs in transaction.get(query, read_time=read_time)]
+            assert len(new_b_values) == 2
+            assert 1 in new_b_values
+            assert 2 in new_b_values
+            assert 3 not in new_b_values
+
+            new_b_values = [docs.get("b") for docs in transaction.get(query)]
+            assert len(new_b_values) == 3
+            assert 1 in new_b_values
+            assert 2 in new_b_values
+            assert 3 in new_b_values
+
+            inner_fn_ran = True
+
+        in_transaction(transaction)
+    # make sure we didn't skip assertions in inner function
+    assert inner_fn_ran is True
+
+
 @pytest.mark.parametrize("with_rollback,expected", [(True, 2), (False, 3)])
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_transaction_rollback(client, cleanup, database, with_rollback, expected):
diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py
index 4d1eed1980..ea17ccd082 100644
--- a/tests/unit/v1/test_aggregation.py
+++ b/tests/unit/v1/test_aggregation.py
@@ -327,6 +327,7 @@ def test_aggregation_query_prep_stream():
         "parent": parent_path,
         "structured_aggregation_query": aggregation_query._to_protobuf(),
         "transaction": None,
+        "read_time": None,
     }
     assert request == expected_request
     assert kwargs == {"retry": None}
@@ -353,6 +354,7 @@ def test_aggregation_query_prep_stream_with_transaction():
         "parent": parent_path,
         "structured_aggregation_query": aggregation_query._to_protobuf(),
         "transaction": txn_id,
+        "read_time": None,
     }
     assert request == expected_request
     assert kwargs == {"retry": None}
@@ -379,6 +381,7 @@ def test_aggregation_query_prep_stream_with_explain_options():
         "structured_aggregation_query": aggregation_query._to_protobuf(),
         "transaction": None,
         "explain_options": explain_options._to_dict(),
+        "read_time": None,
     }
     assert request == expected_request
     assert kwargs == {"retry": None}
@@ -387,8 +390,9 @@
 def _aggregation_query_get_helper(
     retry=None,
     timeout=None,
-    read_time=None,
     explain_options=None,
+    response_read_time=None,
+
query_read_time=None, ): from google.cloud._helpers import _datetime_to_pb_timestamp @@ -411,7 +415,7 @@ def _aggregation_query_get_helper( aggregation_query = make_aggregation_query(query) aggregation_query.count(alias="all") - aggregation_result = AggregationResult(alias="total", value=5, read_time=read_time) + aggregation_result = AggregationResult(alias="total", value=5, read_time=response_read_time) if explain_options is not None: explain_metrics = {"execution_stats": {"results_returned": 1}} @@ -419,14 +423,18 @@ def _aggregation_query_get_helper( explain_metrics = None response_pb = make_aggregation_query_response( [aggregation_result], - read_time=read_time, + read_time=response_read_time, explain_metrics=explain_metrics, ) firestore_api.run_aggregation_query.return_value = iter([response_pb]) kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) # Execute the query and check the response. - returned = aggregation_query.get(**kwargs, explain_options=explain_options) + returned = aggregation_query.get( + **kwargs, + explain_options=explain_options, + read_time=query_read_time, + ) assert isinstance(returned, QueryResultsList) assert len(returned) == 1 @@ -434,9 +442,9 @@ def _aggregation_query_get_helper( for r in result: assert r.alias == aggregation_result.alias assert r.value == aggregation_result.value - if read_time is not None: + if response_read_time is not None: result_datetime = _datetime_to_pb_timestamp(r.read_time) - assert result_datetime == read_time + assert result_datetime == response_read_time assert returned._explain_options == explain_options assert returned.explain_options == explain_options @@ -454,6 +462,7 @@ def _aggregation_query_get_helper( "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, + "read_time": query_read_time, } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() @@ -473,9 +482,9 @@ def test_aggregation_query_get(): def test_aggregation_query_get_with_readtime(): from google.cloud._helpers import _datetime_to_pb_timestamp - one_hour_ago = datetime.now(tz=timezone.utc) - timedelta(hours=1) - read_time = _datetime_to_pb_timestamp(one_hour_ago) - _aggregation_query_get_helper(read_time=read_time) + query_read_time = datetime.now(tz=timezone.utc) - timedelta(hours=1) + response_read_time = _datetime_to_pb_timestamp(query_read_time) + _aggregation_query_get_helper(response_read_time=response_read_time, query_read_time=query_read_time) def test_aggregation_query_get_retry_timeout(): @@ -535,6 +544,7 @@ def test_aggregation_query_get_transaction(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, + "read_time": None, }, metadata=client._rpc_metadata, **kwargs, @@ -555,6 +565,7 @@ def _aggregation_query_stream_w_retriable_exc_helper( timeout=None, transaction=None, expect_retry=True, + read_time=None, ): from google.api_core import exceptions, gapic_v1 @@ -634,6 +645,7 @@ def _stream_w_exception(*_args, **_kw): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": expected_transaction_id, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -645,6 +657,7 @@ def _stream_w_exception(*_args, **_kw): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -713,7 +726,11 @@ def 
_aggregation_query_stream_helper( kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) # Execute the query and check the response. - returned = aggregation_query.stream(**kwargs, explain_options=explain_options) + returned = aggregation_query.stream( + **kwargs, + explain_options=explain_options, + read_time=read_time + ) assert isinstance(returned, StreamGenerator) results = [] @@ -740,6 +757,7 @@ def _aggregation_query_stream_helper( "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, + "read_time": read_time, } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() @@ -756,7 +774,7 @@ def test_aggregation_query_stream(): _aggregation_query_stream_helper() -def test_aggregation_query_stream_with_readtime(): +def test_aggregation_query_stream_w_read_time(): from google.cloud._helpers import _datetime_to_pb_timestamp one_hour_ago = datetime.now(tz=timezone.utc) - timedelta(hours=1) @@ -830,6 +848,7 @@ def test_aggregation_from_query(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, + "read_time": None, }, metadata=client._rpc_metadata, **kwargs, diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py index edb411c9ff..a50049a8a1 100644 --- a/tests/unit/v1/test_client.py +++ b/tests/unit/v1/test_client.py @@ -281,7 +281,7 @@ def test_client_document_factory_w_nested_path(database): assert isinstance(document2, DocumentReference) -def _collections_helper(retry=None, timeout=None, database=None): +def _collections_helper(retry=None, timeout=None, database=None, read_time=None): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.collection import CollectionReference @@ -298,7 +298,7 @@ def __iter__(self): client._firestore_api_internal = firestore_api kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) - collections = list(client.collections(**kwargs)) + collections = list(client.collections(read_time=read_time, **kwargs)) assert len(collections) == len(collection_ids) for collection, collection_id in zip(collections, collection_ids): @@ -308,7 +308,10 @@ def __iter__(self): base_path = client._database_string + "/documents" firestore_api.list_collection_ids.assert_called_once_with( - request={"parent": base_path}, + request={ + "parent": base_path, + "read_time": read_time, + }, metadata=client._rpc_metadata, **kwargs, ) @@ -328,6 +331,12 @@ def test_client_collections_w_retry_timeout(database): _collections_helper(retry=retry, timeout=timeout, database=database) +@pytest.mark.parametrize("database", [None, DEFAULT_DATABASE, "somedb"]) +def test_client_collections_read_time(database): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _collections_helper(database=database, read_time=read_time) + + def _invoke_get_all(client, references, document_pbs, **kwargs): # Create a minimal fake GAPIC with a dummy response. 
firestore_api = mock.Mock(spec=["batch_get_documents"]) @@ -345,7 +354,7 @@ def _invoke_get_all(client, references, document_pbs, **kwargs): def _get_all_helper( - num_snapshots=2, txn_id=None, retry=None, timeout=None, database=None + num_snapshots=2, txn_id=None, retry=None, timeout=None, database=None, read_time=None ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_document import DocumentSnapshot @@ -384,6 +393,7 @@ def _get_all_helper( documents, responses, field_paths=field_paths, + read_time=read_time, **kwargs, ) @@ -409,6 +419,7 @@ def _get_all_helper( "documents": doc_paths, "mask": mask, "transaction": txn_id, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -440,6 +451,12 @@ def test_client_get_all_wrong_order(database): _get_all_helper(num_snapshots=3, database=database) +@pytest.mark.parametrize("database", [None, DEFAULT_DATABASE, "somedb"]) +def test_client_get_all_read_time(database): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _get_all_helper(database=database, read_time=read_time) + + @pytest.mark.parametrize("database", [None, DEFAULT_DATABASE, "somedb"]) def test_client_get_all_unknown_result(database): from google.cloud.firestore_v1.base_client import _BAD_DOC_TEMPLATE @@ -468,6 +485,7 @@ def test_client_get_all_unknown_result(database): "documents": doc_paths, "mask": None, "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index 29f76108d1..00f080447d 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -16,6 +16,7 @@ import mock +from datetime import datetime from tests.unit.v1._test_helpers import DEFAULT_TEST_PROJECT @@ -266,7 +267,7 @@ def test_add_w_retry_timeout(): _add_helper(retry=retry, timeout=timeout) -def _list_documents_helper(page_size=None, retry=None, timeout=None): +def _list_documents_helper(page_size=None, retry=None, timeout=None, read_time=None): from google.api_core.page_iterator import Iterator, Page from google.cloud.firestore_v1 import _helpers as _fs_v1_helpers @@ -299,9 +300,13 @@ def _next_page(self): kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout) if page_size is not None: - documents = list(collection.list_documents(page_size=page_size, **kwargs)) + documents = list(collection.list_documents( + page_size=page_size, + **kwargs, + read_time=read_time, + )) else: - documents = list(collection.list_documents(**kwargs)) + documents = list(collection.list_documents(**kwargs, read_time=read_time)) # Verify the response and the mocks. 
assert len(documents) == len(document_ids) @@ -318,6 +323,7 @@ def _next_page(self): "page_size": page_size, "show_missing": True, "mask": {"field_paths": None}, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -340,6 +346,10 @@ def test_list_documents_w_page_size(): _list_documents_helper(page_size=25) +def test_list_documents_w_read_time(): + _list_documents_helper(read_time=datetime.now()) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) def test_get(query_class): collection = _make_collection_reference("collection") diff --git a/tests/unit/v1/test_cross_language.py b/tests/unit/v1/test_cross_language.py index d2adeb2ba6..a5ccb16be9 100644 --- a/tests/unit/v1/test_cross_language.py +++ b/tests/unit/v1/test_cross_language.py @@ -154,6 +154,7 @@ def test_get_testprotos(test_proto): "documents": [doc._document_path], "mask": None, "transaction": None, + "read_time": None, } firestore_api.batch_get_documents.assert_called_once_with( diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index b9116ae61d..2840f7ec01 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -16,6 +16,7 @@ import mock import pytest +from proto.datetime_helpers import DatetimeWithNanoseconds from tests.unit.v1._test_helpers import DEFAULT_TEST_PROJECT @@ -393,6 +394,7 @@ def _get_helper( retry=None, timeout=None, database=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.transaction import Transaction @@ -401,10 +403,10 @@ def _get_helper( # Create a minimal fake GAPIC with a dummy response. create_time = 123 update_time = 234 - read_time = 345 + response_read_time = read_time.timestamp_pb() if read_time else 345 firestore_api = mock.Mock(spec=["batch_get_documents"]) response = mock.create_autospec(firestore.BatchGetDocumentsResponse) - response.read_time = read_time + response.read_time = response_read_time response.found = mock.create_autospec(document.Document) response.found.fields = {} response.found.create_time = create_time @@ -435,7 +437,10 @@ def WhichOneof(val): kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) snapshot = document_reference.get( - field_paths=field_paths, transaction=transaction, **kwargs + field_paths=field_paths, + transaction=transaction, + **kwargs, + read_time=read_time, ) assert snapshot.reference is document_reference @@ -448,7 +453,7 @@ def WhichOneof(val): else: assert snapshot.to_dict() == {} assert snapshot.exists - assert snapshot.read_time is read_time + assert snapshot.read_time is response_read_time assert snapshot.create_time is create_time assert snapshot.update_time is update_time @@ -469,6 +474,7 @@ def WhichOneof(val): "documents": [document_reference._document_path], "mask": mask, "transaction": expected_transaction_id, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -519,8 +525,18 @@ def test_documentreference_get_with_multiple_field_paths(database): def test_documentreference_get_with_transaction(database): _get_helper(use_transaction=True, database=database) +@pytest.mark.parametrize("database", [None, "somedb"]) +def test_documentreference_get_with_read_time(database): + _get_helper(read_time=DatetimeWithNanoseconds.now(), database=database) + -def _collections_helper(page_size=None, retry=None, timeout=None, database=None): +def _collections_helper( + page_size=None, + retry=None, + timeout=None, + read_time=None, + database=None +): from google.cloud.firestore_v1 import 
_helpers from google.cloud.firestore_v1.collection import CollectionReference from google.cloud.firestore_v1.services.firestore.client import FirestoreClient @@ -541,9 +557,9 @@ def __iter__(self): # Actually make a document and call delete(). document = _make_document_reference("where", "we-are", client=client) if page_size is not None: - collections = list(document.collections(page_size=page_size, **kwargs)) + collections = list(document.collections(page_size=page_size, **kwargs, read_time=read_time)) else: - collections = list(document.collections(**kwargs)) + collections = list(document.collections(**kwargs, read_time=read_time)) # Verify the response and the mocks. assert len(collections) == len(collection_ids) @@ -553,7 +569,11 @@ def __iter__(self): assert collection.id == collection_id api_client.list_collection_ids.assert_called_once_with( - request={"parent": document._document_path, "page_size": page_size}, + request={ + "parent": document._document_path, + "page_size": page_size, + "read_time": read_time, + }, metadata=client._rpc_metadata, **kwargs, ) @@ -578,6 +598,11 @@ def test_documentreference_collections_w_retry_timeout(database): _collections_helper(retry=retry, timeout=timeout, database=database) +@pytest.mark.parametrize("database", [None, "somedb"]) +def test_documentreference_collections_w_page_size(database): + _collections_helper(read_time=DatetimeWithNanoseconds.now(), database=database) + + @mock.patch("google.cloud.firestore_v1.document.Watch", autospec=True) def test_documentreference_on_snapshot(watch): client = mock.Mock(_database_string="sprinklez", spec=["_database_string"]) diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index f30a4fcdff..08b61eb164 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import types import mock @@ -42,6 +43,7 @@ def _query_get_helper( timeout=None, database=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers @@ -71,7 +73,7 @@ def _query_get_helper( # Execute the query and check the response. 
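+    # A read_time passed here is expected to appear verbatim in the RunQuery
+    # request that the mock asserts on below.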
query = make_query(parent) - returned = query.get(**kwargs, explain_options=explain_options) + returned = query.get(**kwargs, explain_options=explain_options, read_time=read_time) assert isinstance(returned, QueryResultsList) assert len(returned) == 1 @@ -94,6 +96,7 @@ def _query_get_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() @@ -118,6 +121,11 @@ def test_query_get_w_retry_timeout(): _query_get_helper(retry=retry, timeout=timeout) +def test_query_get_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _query_get_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_query_get_limit_to_last(database): from google.cloud import firestore @@ -169,6 +177,7 @@ def test_query_get_limit_to_last(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -338,6 +347,7 @@ def _query_stream_helper( timeout=None, database=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.stream_generator import StreamGenerator @@ -369,7 +379,7 @@ def _query_stream_helper( # Execute the query and check the response. query = make_query(parent) - get_response = query.stream(**kwargs, explain_options=explain_options) + get_response = query.stream(**kwargs, explain_options=explain_options, read_time=read_time) assert isinstance(get_response, StreamGenerator) returned = list(get_response) @@ -393,6 +403,7 @@ def _query_stream_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() @@ -417,6 +428,11 @@ def test_query_stream_w_retry_timeout(): _query_stream_helper(retry=retry, timeout=timeout) +def test_query_stream_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _query_stream_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_query_stream_with_limit_to_last(database): # Attach the fake GAPIC to a real client. @@ -475,6 +491,65 @@ def test_query_stream_with_transaction(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": txn_id, + "read_time": None, + }, + metadata=client._rpc_metadata, + ) + + +def test_query_stream_w_explain_options(): + from google.cloud.firestore_v1.query_profile import ExplainOptions + + explain_options = ExplainOptions(analyze=True) + _query_stream_helper(explain_options=explain_options) + + +@pytest.mark.parametrize("database", [None, "somedb"]) +def test_query_stream_with_transaction_and_read_time(database): + from google.cloud.firestore_v1.stream_generator import StreamGenerator + + # Create a minimal fake GAPIC. + firestore_api = mock.Mock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = make_client(database=database) + client._firestore_api_internal = firestore_api + + # Create a real-ish transaction for this client. + transaction = client.transaction() + txn_id = b"\x00\x00\x01-work-\xf2" + transaction._id = txn_id + + # Create a read_time for this client. + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + + # Make a **real** collection reference as parent. 
+ parent = client.collection("declaration") + + # Add a dummy response to the minimal fake GAPIC. + parent_path, expected_prefix = parent._parent_info() + name = "{}/burger".format(expected_prefix) + data = {"lettuce": b"\xee\x87"} + response_pb = _make_query_response(name=name, data=data) + firestore_api.run_query.return_value = iter([response_pb]) + + # Execute the query and check the response. + query = make_query(parent) + get_response = query.stream(transaction=transaction, read_time=read_time) + assert isinstance(get_response, StreamGenerator) + returned = list(get_response) + assert len(returned) == 1 + snapshot = returned[0] + assert snapshot.reference._path == ("declaration", "burger") + assert snapshot.to_dict() == data + + # Verify the mock call. + firestore_api.run_query.assert_called_once_with( + request={ + "parent": parent_path, + "structured_query": query._to_protobuf(), + "transaction": txn_id, + "read_time": read_time, }, metadata=client._rpc_metadata, ) @@ -510,6 +585,7 @@ def test_query_stream_no_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -545,6 +621,7 @@ def test_query_stream_second_response_in_empty_stream(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -589,6 +666,7 @@ def test_query_stream_with_skipped_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -633,6 +711,7 @@ def test_query_stream_empty_after_first_response(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -680,6 +759,7 @@ def test_query_stream_w_collection_group(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -690,7 +770,7 @@ def test_query_stream_w_collection_group(database): def _query_stream_w_retriable_exc_helper( - retry=_not_passed, timeout=None, transaction=None, expect_retry=True, database=None + retry=_not_passed, timeout=None, transaction=None, expect_retry=True, database=None, read_time=None ): from google.api_core import exceptions, gapic_v1 @@ -734,7 +814,7 @@ def _stream_w_exception(*_args, **_kw): # Execute the query and check the response. 
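+    # read_time must survive the automatic retry: both the initial request and
+    # the resumed (start_after) request asserted on below are expected to
+    # carry it.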
query = make_query(parent) - get_response = query.stream(transaction=transaction, **kwargs) + get_response = query.stream(transaction=transaction, read_time=read_time, **kwargs) assert isinstance(get_response, StreamGenerator) if expect_retry: @@ -768,6 +848,7 @@ def _stream_w_exception(*_args, **_kw): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": expected_transaction_id, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -780,6 +861,7 @@ def _stream_w_exception(*_args, **_kw): "parent": parent_path, "structured_query": new_query._to_protobuf(), "transaction": None, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -804,11 +886,9 @@ def test_query_stream_w_retriable_exc_w_transaction(): _query_stream_w_retriable_exc_helper(transaction=txn) -def test_query_stream_w_explain_options(): - from google.cloud.firestore_v1.query_profile import ExplainOptions - - explain_options = ExplainOptions(analyze=True) - _query_stream_helper(explain_options=explain_options) +def test_query_stream_w_retriable_exc_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _query_stream_w_retriable_exc_helper(read_time=read_time) @mock.patch("google.cloud.firestore_v1.query.Watch", autospec=True) @@ -842,7 +922,7 @@ def test_collection_group_constructor_all_descendents_is_false(): _make_collection_group(mock.sentinel.parent, all_descendants=False) -def _collection_group_get_partitions_helper(retry=None, timeout=None, database=None): +def _collection_group_get_partitions_helper(retry=None, timeout=None, database=None, read_time=None): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC. @@ -868,7 +948,7 @@ def _collection_group_get_partitions_helper(retry=None, timeout=None, database=N # Execute the query and check the response. query = _make_collection_group(parent) - get_response = query.get_partitions(2, **kwargs) + get_response = query.get_partitions(2, read_time=read_time, **kwargs) assert isinstance(get_response, types.GeneratorType) returned = list(get_response) @@ -885,6 +965,7 @@ def _collection_group_get_partitions_helper(retry=None, timeout=None, database=N "parent": parent_path, "structured_query": partition_query._to_protobuf(), "partition_count": 2, + "read_time": read_time, }, metadata=client._rpc_metadata, **kwargs, @@ -903,6 +984,11 @@ def test_collection_group_get_partitions_w_retry_timeout(): _collection_group_get_partitions_helper(retry=retry, timeout=timeout) +def test_collection_group_get_partitions_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _collection_group_get_partitions_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_collection_group_get_partitions_w_filter(database): # Make a **real** collection reference as parent. diff --git a/tests/unit/v1/test_transaction.py b/tests/unit/v1/test_transaction.py index 941e294dbd..20cc3d652a 100644 --- a/tests/unit/v1/test_transaction.py +++ b/tests/unit/v1/test_transaction.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime import mock import pytest @@ -312,13 +313,15 @@ def test_transaction__commit_failure(database): ) -def _transaction_get_all_helper(retry=None, timeout=None): +def _transaction_get_all_helper(retry=None, timeout=None, read_time=None): from google.cloud.firestore_v1 import _helpers client = mock.Mock(spec=["get_all"]) transaction = _make_transaction(client) ref1, ref2 = mock.Mock(), mock.Mock() kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + if read_time is not None: + kwargs["read_time"] = read_time result = transaction.get_all([ref1, ref2], **kwargs) @@ -342,10 +345,16 @@ def test_transaction_get_all_w_retry_timeout(): _transaction_get_all_helper(retry=retry, timeout=timeout) +def test_transaction_get_all_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _transaction_get_all_helper(read_time=read_time) + + def _transaction_get_w_document_ref_helper( retry=None, timeout=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.document import DocumentReference @@ -357,6 +366,8 @@ def _transaction_get_w_document_ref_helper( if explain_options is not None: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time result = transaction.get(ref, **kwargs) @@ -392,6 +403,7 @@ def _transaction_get_w_query_helper( retry=None, timeout=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.query import Query @@ -434,6 +446,7 @@ def _transaction_get_w_query_helper( query, **kwargs, explain_options=explain_options, + read_time=read_time, ) # Verify the response. @@ -459,6 +472,7 @@ def _transaction_get_w_query_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": b"beep-fail-commit", + "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() @@ -489,6 +503,11 @@ def test_transaction_get_w_query_w_explain_options(): _transaction_get_w_query_helper(explain_options=ExplainOptions(analyze=True)) +def test_transaction_get_w_query_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _transaction_get_w_query_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_transaction_get_failure(database): client = _make_client(database=database) From 0bc5302a203270fdc03c7043ef7b8840b312404d Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 30 Jan 2025 19:03:31 +0000 Subject: [PATCH 02/37] Added system test for DocumentReference.collections --- tests/system/test_system.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 5d4cda4e89..47600d67a4 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -740,6 +740,38 @@ def test_create_document_w_subcollection(client, cleanup, database): assert sorted(child.id for child in children) == sorted(child_ids) +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_document_collections_w_read_time(client, cleanup, database): + collection_id = "doc-create-sub" + UNIQUE_RESOURCE_ID + document_id = "doc" + UNIQUE_RESOURCE_ID + document = client.document(collection_id, document_id) + # Add to clean-up before API request (in case ``create()`` fails). 
+ cleanup(document.delete) + + data = {"now": firestore.SERVER_TIMESTAMP} + document.create(data) + + original_child_ids = ["child1", "child2"] + read_time = None + + for child_id in original_child_ids: + subcollection = document.collection(child_id) + update_time, subdoc = subcollection.add({"foo": "bar"}) + read_time = update_time if read_time is None or update_time > read_time else read_time + cleanup(subdoc.delete) + + update_time, newdoc = document.collection("child3").add({"foo": "bar"}) + cleanup(newdoc.delete) + assert update_time > read_time + + # Compare the query at read_time to the query at new update time. + original_children = document.collections(read_time=read_time) + assert sorted(child.id for child in original_children) == sorted(original_child_ids) + + original_children = document.collections() + assert sorted(child.id for child in original_children) == sorted(original_child_ids + ["child3"]) + + def assert_timestamp_less(timestamp_pb1, timestamp_pb2): assert timestamp_pb1 < timestamp_pb2 From e9eefd6cf130caba9576ae4edbdb7f6daccb74b5 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 30 Jan 2025 19:10:44 +0000 Subject: [PATCH 03/37] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/firestore_v1/aggregation.py | 4 +- google/cloud/firestore_v1/base_client.py | 2 +- google/cloud/firestore_v1/collection.py | 4 +- google/cloud/firestore_v1/document.py | 4 +- google/cloud/firestore_v1/query.py | 10 +++-- tests/system/test_system.py | 47 ++++++++++++++---------- tests/unit/v1/test_aggregation.py | 12 +++--- tests/unit/v1/test_client.py | 7 +++- tests/unit/v1/test_collection.py | 12 +++--- tests/unit/v1/test_document.py | 13 +++---- tests/unit/v1/test_query.py | 15 ++++++-- 11 files changed, 82 insertions(+), 48 deletions(-) diff --git a/google/cloud/firestore_v1/aggregation.py b/google/cloud/firestore_v1/aggregation.py index 7f35ffadfc..a19cb2464b 100644 --- a/google/cloud/firestore_v1/aggregation.py +++ b/google/cloud/firestore_v1/aggregation.py @@ -107,7 +107,9 @@ def get( return QueryResultsList(result_list, explain_options, explain_metrics) - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): + def _get_stream_iterator( + self, transaction, retry, timeout, explain_options=None, read_time=None + ): """Helper method for :meth:`stream`.""" request, kwargs = self._prep_stream( transaction, diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index bda8917b17..864a9d5835 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -462,7 +462,7 @@ def get_all( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, *, - read_time: datetime | None = None + read_time: datetime | None = None, ) -> Union[ AsyncGenerator[DocumentSnapshot, Any], Generator[DocumentSnapshot, Any, Any] ]: diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py index 1f29392bb8..204ee05363 100644 --- a/google/cloud/firestore_v1/collection.py +++ b/google/cloud/firestore_v1/collection.py @@ -162,7 +162,9 @@ def list_documents( collection does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_list_documents(page_size, retry, 
timeout, read_time) + request, kwargs = self._prep_list_documents( + page_size, retry, timeout, read_time + ) iterator = self._client._firestore_api.list_documents( request=request, diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index 8f9e58ad77..436bec0665 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -404,7 +404,9 @@ def get( """ from google.cloud.firestore_v1.base_client import _parse_batch_get - request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout, read_time) + request, kwargs = self._prep_batch_get( + field_paths, transaction, retry, timeout, read_time + ) response_iter = self._client._firestore_api.batch_get_documents( request=request, diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index d0a8af272e..859c780e49 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -245,7 +245,9 @@ def _chunkify( ): return - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): + def _get_stream_iterator( + self, transaction, retry, timeout, explain_options=None, read_time=None + ): """Helper method for :meth:`stream`.""" request, expected_prefix, kwargs = self._prep_stream( transaction, @@ -602,7 +604,7 @@ def get_partitions( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime] = None + read_time: Optional[datetime] = None, ) -> Generator[QueryPartition, None, None]: """Partition a query for parallelization. @@ -623,7 +625,9 @@ def get_partitions( if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp within the past 7 days. For the most accurate results, use UTC timezone. """ - request, kwargs = self._prep_get_partitions(partition_count, retry, timeout, read_time) + request, kwargs = self._prep_get_partitions( + partition_count, retry, timeout, read_time + ) pager = self._client._firestore_api.partition_query( request=request, diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 47600d67a4..b3f3ef5d3a 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -225,9 +225,7 @@ def test_collections_w_read_time(client, cleanup, database): # Add to clean-up before API request (in case ``create()`` fails). 
cleanup(first_document.delete) - data = { - "status": "new" - } + data = {"status": "new"} write_result = first_document.create(data) read_time = write_result.update_time num_collections = len(list(client.collections())) @@ -757,9 +755,11 @@ def test_document_collections_w_read_time(client, cleanup, database): for child_id in original_child_ids: subcollection = document.collection(child_id) update_time, subdoc = subcollection.add({"foo": "bar"}) - read_time = update_time if read_time is None or update_time > read_time else read_time + read_time = ( + update_time if read_time is None or update_time > read_time else read_time + ) cleanup(subdoc.delete) - + update_time, newdoc = document.collection("child3").add({"foo": "bar"}) cleanup(newdoc.delete) assert update_time > read_time @@ -769,7 +769,9 @@ def test_document_collections_w_read_time(client, cleanup, database): assert sorted(child.id for child in original_children) == sorted(original_child_ids) original_children = document.collections() - assert sorted(child.id for child in original_children) == sorted(original_child_ids + ["child3"]) + assert sorted(child.id for child in original_children) == sorted( + original_child_ids + ["child3"] + ) def assert_timestamp_less(timestamp_pb1, timestamp_pb2): @@ -1025,15 +1027,11 @@ def test_document_get_w_read_time(client, cleanup, database): # First make sure it doesn't exist. assert not document.get().exists - initial_data = { - "test": "success" - } + initial_data = {"test": "success"} write_result = document.create(initial_data) read_time = write_result.update_time - new_data = { - "test": "changed" - } + new_data = {"test": "changed"} document.update(new_data) snapshot = document.get(read_time=read_time) @@ -1589,7 +1587,10 @@ def test_query_stream_w_read_time(query_docs, cleanup, database): # Compare query at read_time to query at current time. 
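+    # new_ref was written after read_time, so it must not show up in the
+    # point-in-time results below.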
query = collection.where(filter=FieldFilter("b", "==", 1)) - values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream(read_time=read_time)} + values = { + snapshot.id: snapshot.to_dict() + for snapshot in query.stream(read_time=read_time) + } assert len(values) == num_vals assert new_ref.id not in values for key, value in values.items(): @@ -1964,9 +1965,10 @@ def test_get_all(client, cleanup, database): document1.update(new_data) document2.create(new_data) document3.update(new_data) - - snapshots = list(client.get_all([document1, document2, document3], read_time=read_time)) + snapshots = list( + client.get_all([document1, document2, document3], read_time=read_time) + ) assert snapshots[0].exists assert snapshots[1].exists assert not snapshots[2].exists @@ -3155,11 +3157,14 @@ def test_aggregation_query_stream_or_get_w_explain_options_analyze_false( @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) -@pytest.mark.parametrize("aggregation_type,expected_value", - [("count", 5), ("sum", 100), ("avg", 4.0)]) -def test_aggregation_queries_with_read_time(collection, query, cleanup, database, aggregation_type, expected_value): +@pytest.mark.parametrize( + "aggregation_type,expected_value", [("count", 5), ("sum", 100), ("avg", 4.0)] +) +def test_aggregation_queries_with_read_time( + collection, query, cleanup, database, aggregation_type, expected_value +): """ - Ensure that all aggregation queries work when read_time is passed into + Ensure that all aggregation queries work when read_time is passed into a query..().get() method """ # Find the most recent read_time in collections @@ -3447,7 +3452,9 @@ def test_query_in_transaction_with_read_time(client, cleanup, database): def in_transaction(transaction): global inner_fn_ran - new_b_values = [docs.get("b") for docs in transaction.get(query, read_time=read_time)] + new_b_values = [ + docs.get("b") for docs in transaction.get(query, read_time=read_time) + ] assert len(new_b_values) == 2 assert 1 in new_b_values assert 2 in new_b_values diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index ea17ccd082..c3d90e3056 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -415,7 +415,9 @@ def _aggregation_query_get_helper( aggregation_query = make_aggregation_query(query) aggregation_query.count(alias="all") - aggregation_result = AggregationResult(alias="total", value=5, read_time=response_read_time) + aggregation_result = AggregationResult( + alias="total", value=5, read_time=response_read_time + ) if explain_options is not None: explain_metrics = {"execution_stats": {"results_returned": 1}} @@ -484,7 +486,9 @@ def test_aggregation_query_get_with_readtime(): query_read_time = datetime.now(tz=timezone.utc) - timedelta(hours=1) response_read_time = _datetime_to_pb_timestamp(query_read_time) - _aggregation_query_get_helper(response_read_time=response_read_time, query_read_time=query_read_time) + _aggregation_query_get_helper( + response_read_time=response_read_time, query_read_time=query_read_time + ) def test_aggregation_query_get_retry_timeout(): @@ -727,9 +731,7 @@ def _aggregation_query_stream_helper( # Execute the query and check the response. 
returned = aggregation_query.stream( - **kwargs, - explain_options=explain_options, - read_time=read_time + **kwargs, explain_options=explain_options, read_time=read_time ) assert isinstance(returned, StreamGenerator) diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py index a50049a8a1..997341f221 100644 --- a/tests/unit/v1/test_client.py +++ b/tests/unit/v1/test_client.py @@ -354,7 +354,12 @@ def _invoke_get_all(client, references, document_pbs, **kwargs): def _get_all_helper( - num_snapshots=2, txn_id=None, retry=None, timeout=None, database=None, read_time=None + num_snapshots=2, + txn_id=None, + retry=None, + timeout=None, + database=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_document import DocumentSnapshot diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index 00f080447d..006032213b 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -300,11 +300,13 @@ def _next_page(self): kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout) if page_size is not None: - documents = list(collection.list_documents( - page_size=page_size, - **kwargs, - read_time=read_time, - )) + documents = list( + collection.list_documents( + page_size=page_size, + **kwargs, + read_time=read_time, + ) + ) else: documents = list(collection.list_documents(**kwargs, read_time=read_time)) diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index 2840f7ec01..21809cb5ac 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -525,17 +525,14 @@ def test_documentreference_get_with_multiple_field_paths(database): def test_documentreference_get_with_transaction(database): _get_helper(use_transaction=True, database=database) + @pytest.mark.parametrize("database", [None, "somedb"]) def test_documentreference_get_with_read_time(database): _get_helper(read_time=DatetimeWithNanoseconds.now(), database=database) def _collections_helper( - page_size=None, - retry=None, - timeout=None, - read_time=None, - database=None + page_size=None, retry=None, timeout=None, read_time=None, database=None ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.collection import CollectionReference @@ -557,7 +554,9 @@ def __iter__(self): # Actually make a document and call delete(). document = _make_document_reference("where", "we-are", client=client) if page_size is not None: - collections = list(document.collections(page_size=page_size, **kwargs, read_time=read_time)) + collections = list( + document.collections(page_size=page_size, **kwargs, read_time=read_time) + ) else: collections = list(document.collections(**kwargs, read_time=read_time)) @@ -570,7 +569,7 @@ def __iter__(self): api_client.list_collection_ids.assert_called_once_with( request={ - "parent": document._document_path, + "parent": document._document_path, "page_size": page_size, "read_time": read_time, }, diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index 08b61eb164..a6ce8fadf0 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -379,7 +379,9 @@ def _query_stream_helper( # Execute the query and check the response. 
query = make_query(parent) - get_response = query.stream(**kwargs, explain_options=explain_options, read_time=read_time) + get_response = query.stream( + **kwargs, explain_options=explain_options, read_time=read_time + ) assert isinstance(get_response, StreamGenerator) returned = list(get_response) @@ -770,7 +772,12 @@ def test_query_stream_w_collection_group(database): def _query_stream_w_retriable_exc_helper( - retry=_not_passed, timeout=None, transaction=None, expect_retry=True, database=None, read_time=None + retry=_not_passed, + timeout=None, + transaction=None, + expect_retry=True, + database=None, + read_time=None, ): from google.api_core import exceptions, gapic_v1 @@ -922,7 +929,9 @@ def test_collection_group_constructor_all_descendents_is_false(): _make_collection_group(mock.sentinel.parent, all_descendants=False) -def _collection_group_get_partitions_helper(retry=None, timeout=None, database=None, read_time=None): +def _collection_group_get_partitions_helper( + retry=None, timeout=None, database=None, read_time=None +): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC. From a1d80308ba1ca3caf6cdd0ad25fdc2873838b671 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 30 Jan 2025 19:48:46 +0000 Subject: [PATCH 04/37] linting --- tests/system/test_system.py | 1 - tests/unit/v1/test_document.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index b3f3ef5d3a..205b39f35a 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1018,7 +1018,6 @@ def test_document_get(client, cleanup, database): @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_document_get_w_read_time(client, cleanup, database): - now = datetime.datetime.now(tz=datetime.timezone.utc) document_id = "for-get" + UNIQUE_RESOURCE_ID document = client.document("created", document_id) # Add to clean-up before API request (in case ``create()`` fails). 
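Taken together, patches 01-04 give the synchronous surface a uniform
point-in-time signature. A minimal caller-side sketch of the new parameter
(the client, collection, and field names here are illustrative assumptions,
not part of this series):

    import datetime

    from google.cloud import firestore
    from google.cloud.firestore_v1.base_query import FieldFilter

    db = firestore.Client()

    # read_time must fall inside the PITR window: within the past hour, or a
    # whole-minute timestamp within the past 7 days when PITR is enabled.
    read_time = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(
        minutes=5
    )

    # Point-in-time document read.
    snapshot = db.collection("users").document("alice").get(read_time=read_time)

    # Point-in-time query stream.
    query = db.collection("users").where(filter=FieldFilter("active", "==", True))
    for doc in query.stream(read_time=read_time):
        print(doc.id, doc.to_dict())

    # Point-in-time aggregation.
    results = db.collection("users").count(alias="all").get(read_time=read_time)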
diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index 21809cb5ac..19686d244d 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -598,7 +598,7 @@ def test_documentreference_collections_w_retry_timeout(database): @pytest.mark.parametrize("database", [None, "somedb"]) -def test_documentreference_collections_w_page_size(database): +def test_documentreference_collections_w_read_time(database): _collections_helper(read_time=DatetimeWithNanoseconds.now(), database=database) From d551aa22475e7d3d6b8a6459b1082d41e2dce19a Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 30 Jan 2025 21:11:32 +0000 Subject: [PATCH 05/37] Conditionally adding read_time to kwargs for cleaner test separation between sync/async --- google/cloud/firestore_v1/base_query.py | 3 +- tests/unit/v1/test_query.py | 43 +++++++++++++------------ tests/unit/v1/test_transaction.py | 3 +- 3 files changed, 26 insertions(+), 23 deletions(-) diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 0838db5267..51825c035d 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -1059,10 +1059,11 @@ def _prep_stream( "parent": parent_path, "structured_query": self._to_protobuf(), "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, expected_prefix, kwargs diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index a6ce8fadf0..00744dad55 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -96,10 +96,11 @@ def _query_get_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() + if read_time: + request["read_time"] = read_time # Verify the mock call. firestore_api.run_query.assert_called_once_with( @@ -177,7 +178,6 @@ def test_query_get_limit_to_last(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -405,10 +405,11 @@ def _query_stream_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time # Verify the mock call. 
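+    # An unset read_time must be omitted from the request dict entirely rather
+    # than sent as an explicit None.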
firestore_api.run_query.assert_called_once_with( @@ -493,7 +494,6 @@ def test_query_stream_with_transaction(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": txn_id, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -587,7 +587,6 @@ def test_query_stream_no_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -623,7 +622,6 @@ def test_query_stream_second_response_in_empty_stream(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -668,7 +666,6 @@ def test_query_stream_with_skipped_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -713,7 +710,6 @@ def test_query_stream_empty_after_first_response(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -761,7 +757,6 @@ def test_query_stream_w_collection_group(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -849,27 +844,33 @@ def _stream_w_exception(*_args, **_kw): expected_transaction_id = transaction.id else: expected_transaction_id = None + + expected_request = { + "parent": parent_path, + "structured_query": query._to_protobuf(), + "transaction": expected_transaction_id, + } + if read_time is not None: + expected_request["read_time"] = read_time assert calls[0] == mock.call( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": expected_transaction_id, - "read_time": read_time, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) if expect_retry: new_query = query.start_after(snapshot) + expected_request = { + "parent": parent_path, + "structured_query": new_query._to_protobuf(), + "transaction": None, + } + if read_time is not None: + expected_request["read_time"] = read_time + assert calls[1] == mock.call( - request={ - "parent": parent_path, - "structured_query": new_query._to_protobuf(), - "transaction": None, - "read_time": read_time, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) diff --git a/tests/unit/v1/test_transaction.py b/tests/unit/v1/test_transaction.py index 20cc3d652a..519a07e67c 100644 --- a/tests/unit/v1/test_transaction.py +++ b/tests/unit/v1/test_transaction.py @@ -472,10 +472,11 @@ def _transaction_get_w_query_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": b"beep-fail-commit", - "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time # Verify the mock call. 
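+    # The transaction id and, when supplied, read_time are expected together
+    # in the same RunQuery request.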
firestore_api.run_query.assert_called_once_with( From 3151b8b515e70a01c8b5baf3a063630a70dd3971 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 30 Jan 2025 21:14:03 +0000 Subject: [PATCH 06/37] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/unit/v1/test_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index 00744dad55..1df90a3a94 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -844,7 +844,7 @@ def _stream_w_exception(*_args, **_kw): expected_transaction_id = transaction.id else: expected_transaction_id = None - + expected_request = { "parent": parent_path, "structured_query": query._to_protobuf(), From 4e6f95edc288096deedb58e266d4b63020e90f3f Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Fri, 31 Jan 2025 20:43:36 +0000 Subject: [PATCH 07/37] Conditionally adding read_time to kwargs in base_aggregation.py --- google/cloud/firestore_v1/base_aggregation.py | 3 +- tests/unit/v1/test_aggregation.py | 40 ++++++++++--------- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/google/cloud/firestore_v1/base_aggregation.py b/google/cloud/firestore_v1/base_aggregation.py index 06dee8ac73..b49b435185 100644 --- a/google/cloud/firestore_v1/base_aggregation.py +++ b/google/cloud/firestore_v1/base_aggregation.py @@ -213,10 +213,11 @@ def _prep_stream( "parent": parent_path, "structured_aggregation_query": self._to_protobuf(), "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index c3d90e3056..236526e641 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -327,7 +327,6 @@ def test_aggregation_query_prep_stream(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, - "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -354,7 +353,6 @@ def test_aggregation_query_prep_stream_with_transaction(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, - "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -381,7 +379,6 @@ def test_aggregation_query_prep_stream_with_explain_options(): "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, "explain_options": explain_options._to_dict(), - "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -464,10 +461,11 @@ def _aggregation_query_get_helper( "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, - "read_time": query_read_time, } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() + if query_read_time is not None: + expected_request["read_time"] = query_read_time # Verify the mock call. 
        firestore_api.run_aggregation_query.assert_called_once_with(
@@ -548,7 +546,6 @@ def test_aggregation_query_get_transaction():
            "parent": parent_path,
            "structured_aggregation_query": aggregation_query._to_protobuf(),
            "transaction": txn_id,
-            "read_time": None,
        },
        metadata=client._rpc_metadata,
        **kwargs,
@@ -644,25 +641,30 @@ def _stream_w_exception(*_args, **_kw):
    else:
        expected_transaction_id = None

+    expected_request = {
+        "parent": parent_path,
+        "structured_aggregation_query": aggregation_query._to_protobuf(),
+        "transaction": expected_transaction_id,
+    }
+    if read_time is not None:
+        expected_request["read_time"] = read_time
+
    assert calls[0] == mock.call(
-        request={
-            "parent": parent_path,
-            "structured_aggregation_query": aggregation_query._to_protobuf(),
-            "transaction": expected_transaction_id,
-            "read_time": read_time,
-        },
+        request=expected_request,
        metadata=client._rpc_metadata,
        **kwargs,
    )

    if expect_retry:
+        expected_request = {
+            "parent": parent_path,
+            "structured_aggregation_query": aggregation_query._to_protobuf(),
+            "transaction": None,
+        }
+        if read_time is not None:
+            expected_request["read_time"] = read_time
        assert calls[1] == mock.call(
-            request={
-                "parent": parent_path,
-                "structured_aggregation_query": aggregation_query._to_protobuf(),
-                "transaction": None,
-                "read_time": read_time,
-            },
+            request=expected_request,
            metadata=client._rpc_metadata,
            **kwargs,
        )
@@ -759,10 +761,11 @@ def _aggregation_query_stream_helper(
        "parent": parent_path,
        "structured_aggregation_query": aggregation_query._to_protobuf(),
        "transaction": None,
-        "read_time": read_time,
    }
    if explain_options is not None:
        expected_request["explain_options"] = explain_options._to_dict()
+    if read_time is not None:
+        expected_request["read_time"] = read_time

    # Verify the mock call. 
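+    # As with plain queries, an unset read_time must not appear in the
+    # RunAggregationQuery request at all.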
firestore_api.run_aggregation_query.assert_called_once_with( @@ -850,7 +853,6 @@ def test_aggregation_from_query(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, - "read_time": None, }, metadata=client._rpc_metadata, **kwargs, From 2ed7eb5d55c987d5fca027e2499741943594b5a6 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Mon, 3 Feb 2025 17:17:42 +0000 Subject: [PATCH 08/37] More conditionally adding read_time to kwargs --- google/cloud/firestore_v1/base_client.py | 6 ++-- google/cloud/firestore_v1/base_collection.py | 3 +- google/cloud/firestore_v1/base_document.py | 6 ++-- google/cloud/firestore_v1/base_query.py | 3 +- tests/unit/v1/test_client.py | 27 ++++++++++-------- tests/unit/v1/test_collection.py | 19 +++++++------ tests/unit/v1/test_cross_language.py | 1 - tests/unit/v1/test_document.py | 30 ++++++++++++-------- tests/unit/v1/test_query.py | 14 +++++---- 9 files changed, 64 insertions(+), 45 deletions(-) diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index 864a9d5835..17734214dc 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -448,8 +448,9 @@ def _prep_get_all( "documents": document_paths, "mask": mask, "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, reference_map, kwargs @@ -477,8 +478,9 @@ def _prep_collections( """Shared setup for async/sync :meth:`collections`.""" request = { "parent": "{}/documents".format(self._database_string), - "read_time": read_time, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py index 3c80b16adc..b27f87e108 100644 --- a/google/cloud/firestore_v1/base_collection.py +++ b/google/cloud/firestore_v1/base_collection.py @@ -217,8 +217,9 @@ def _prep_list_documents( # include any fields. 
To save on data transfer, we can set a field_path mask # to include no fields "mask": {"field_paths": None}, - "read_time": read_time, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index ccbe5abf72..66de2d1bc7 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -307,8 +307,9 @@ def _prep_batch_get( "documents": [self._document_path], "mask": mask, "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -335,8 +336,9 @@ def _prep_collections( request = { "parent": self._document_path, "page_size": page_size, - "read_time": read_time, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 51825c035d..32d16233fc 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -1448,8 +1448,9 @@ def _prep_get_partitions( "parent": parent_path, "structured_query": query._to_protobuf(), "partition_count": partition_count, - "read_time": read_time, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py index 997341f221..51a6b7f449 100644 --- a/tests/unit/v1/test_client.py +++ b/tests/unit/v1/test_client.py @@ -307,11 +307,13 @@ def __iter__(self): assert collection.id == collection_id base_path = client._database_string + "/documents" + expected_request = { + "parent": base_path, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.list_collection_ids.assert_called_once_with( - request={ - "parent": base_path, - "read_time": read_time, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -417,15 +419,17 @@ def _get_all_helper( mask = common.DocumentMask(field_paths=field_paths) kwargs.pop("transaction", None) + expected_request = { + "database": client._database_string, + "documents": doc_paths, + "mask": mask, + "transaction": txn_id, + } + if read_time is not None: + expected_request["read_time"] = read_time client._firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": doc_paths, - "mask": mask, - "transaction": txn_id, - "read_time": read_time, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -490,7 +494,6 @@ def test_client_get_all_unknown_result(database): "documents": doc_paths, "mask": None, "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index 006032213b..490e0ff1be 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -318,15 +318,18 @@ def _next_page(self): assert document.id == document_id parent, _ = collection._parent_info() + expected_request = { + "parent": parent, + "collection_id": collection.id, + "page_size": page_size, + "show_missing": True, + "mask": 
{"field_paths": None}, + } + if read_time is not None: + expected_request["read_time"] = read_time + api_client.list_documents.assert_called_once_with( - request={ - "parent": parent, - "collection_id": collection.id, - "page_size": page_size, - "show_missing": True, - "mask": {"field_paths": None}, - "read_time": read_time, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) diff --git a/tests/unit/v1/test_cross_language.py b/tests/unit/v1/test_cross_language.py index a5ccb16be9..d2adeb2ba6 100644 --- a/tests/unit/v1/test_cross_language.py +++ b/tests/unit/v1/test_cross_language.py @@ -154,7 +154,6 @@ def test_get_testprotos(test_proto): "documents": [doc._document_path], "mask": None, "transaction": None, - "read_time": None, } firestore_api.batch_get_documents.assert_called_once_with( diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index 19686d244d..81dedc9870 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -468,14 +468,17 @@ def WhichOneof(val): else: expected_transaction_id = None + expected_request = { + "database": client._database_string, + "documents": [document_reference._document_path], + "mask": mask, + "transaction": expected_transaction_id, + } + if read_time is not None: + expected_request["read_time"] = read_time + firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": [document_reference._document_path], - "mask": mask, - "transaction": expected_transaction_id, - "read_time": read_time, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -566,13 +569,16 @@ def __iter__(self): assert isinstance(collection, CollectionReference) assert collection.parent == document assert collection.id == collection_id + + expected_result = { + "parent": document._document_path, + "page_size": page_size, + } + if read_time is not None: + expected_result["read_time"] = read_time api_client.list_collection_ids.assert_called_once_with( - request={ - "parent": document._document_path, - "page_size": page_size, - "read_time": read_time, - }, + request=expected_result, metadata=client._rpc_metadata, **kwargs, ) diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index 1df90a3a94..cb2400b503 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -970,13 +970,15 @@ def _collection_group_get_partitions_helper( parent, orders=(query._make_order("__name__", query.ASCENDING),), ) + expected_request = { + "parent": parent_path, + "structured_query": partition_query._to_protobuf(), + "partition_count": 2, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.partition_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": partition_query._to_protobuf(), - "partition_count": 2, - "read_time": read_time, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) From 402fe492ab3269788fdc35e7c1c4c0cdab940e2e Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 3 Feb 2025 17:20:22 +0000 Subject: [PATCH 09/37] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/unit/v1/test_document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/v1/test_document.py 
b/tests/unit/v1/test_document.py index 81dedc9870..fdb8fdd08b 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -569,7 +569,7 @@ def __iter__(self): assert isinstance(collection, CollectionReference) assert collection.parent == document assert collection.id == collection_id - + expected_result = { "parent": document._document_path, "page_size": page_size, From 39ae8789d8b1c9a52bebabef24c54e69dbdcc4ce Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Mon, 3 Feb 2025 18:34:34 +0000 Subject: [PATCH 10/37] Added tests for collections.get/stream --- tests/unit/v1/test_collection.py | 38 +++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index 490e0ff1be..574ec01938 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -16,7 +16,7 @@ import mock -from datetime import datetime +from datetime import datetime, timezone from tests.unit.v1._test_helpers import DEFAULT_TEST_PROJECT @@ -418,6 +418,24 @@ def test_get_w_explain_options(query_class): ) +@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) +def test_get_w_read_time(query_class): + from google.cloud.firestore_v1.query_profile import ExplainOptions + + read_time = datetime.now(tz=timezone.utc) + collection = _make_collection_reference("collection") + get_response = collection.get(read_time=read_time) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + + assert get_response is query_instance.get.return_value + query_instance.get.assert_called_once_with( + transaction=None, + read_time=read_time, + ) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) def test_stream(query_class): collection = _make_collection_reference("collection") @@ -478,6 +496,24 @@ def test_stream_w_explain_options(query_class): ) +@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) +def test_stream_w_read_time(query_class): + from google.cloud.firestore_v1.query_profile import ExplainOptions + + read_time = datetime.now(tz=timezone.utc) + collection = _make_collection_reference("collection") + get_response = collection.stream(read_time=read_time) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + + assert get_response is query_instance.stream.return_value + query_instance.stream.assert_called_once_with( + transaction=None, + read_time=read_time, + ) + + @mock.patch("google.cloud.firestore_v1.collection.Watch", autospec=True) def test_on_snapshot(watch): collection = _make_collection_reference("collection") From d85f2f08f95c8ca28748361559cd15e55c296868 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Mon, 3 Feb 2025 18:53:45 +0000 Subject: [PATCH 11/37] linting --- tests/unit/v1/test_collection.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index 574ec01938..da91651b95 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -420,8 +420,6 @@ def test_get_w_explain_options(query_class): @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) def test_get_w_read_time(query_class): - from google.cloud.firestore_v1.query_profile import ExplainOptions - read_time = datetime.now(tz=timezone.utc) collection = _make_collection_reference("collection") get_response = collection.get(read_time=read_time) @@ -498,8 +496,6 @@ def 
test_stream_w_explain_options(query_class): @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) def test_stream_w_read_time(query_class): - from google.cloud.firestore_v1.query_profile import ExplainOptions - read_time = datetime.now(tz=timezone.utc) collection = _make_collection_reference("collection") get_response = collection.stream(read_time=read_time) From a8ced8f218a4b8105ad7106926231890fa6ec528 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Fri, 14 Feb 2025 18:08:54 +0000 Subject: [PATCH 12/37] Addressed review feedback --- google/cloud/firestore_v1/aggregation.py | 34 ++++----- google/cloud/firestore_v1/base_aggregation.py | 25 +++---- google/cloud/firestore_v1/base_client.py | 10 +-- google/cloud/firestore_v1/base_collection.py | 10 +-- google/cloud/firestore_v1/base_document.py | 11 +-- google/cloud/firestore_v1/base_query.py | 13 ++-- google/cloud/firestore_v1/base_transaction.py | 7 +- google/cloud/firestore_v1/client.py | 25 +++---- google/cloud/firestore_v1/collection.py | 27 ++++---- google/cloud/firestore_v1/document.py | 12 ++-- google/cloud/firestore_v1/query.py | 11 +-- google/cloud/firestore_v1/transaction.py | 21 +++--- tests/system/test_system.py | 13 +++- tests/unit/v1/test_aggregation.py | 69 +++++++++++++++++++ 14 files changed, 189 insertions(+), 99 deletions(-) diff --git a/google/cloud/firestore_v1/aggregation.py b/google/cloud/firestore_v1/aggregation.py index a19cb2464b..73fa813c7c 100644 --- a/google/cloud/firestore_v1/aggregation.py +++ b/google/cloud/firestore_v1/aggregation.py @@ -20,7 +20,8 @@ """ from __future__ import annotations -from datetime import datetime +import datetime + from typing import TYPE_CHECKING, Any, Generator, List, Optional, Union from google.api_core import exceptions, gapic_v1 @@ -57,7 +58,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[AggregationResult]: """Runs the aggregation query. @@ -80,10 +81,11 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. - read_time (Optional[datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, - or if Point-in-Time Recovery is enabled, can additionally be a whole minute - timestamp within the past 7 days. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. + Returns: QueryResultsList[AggregationResult]: The aggregation query results. @@ -142,7 +144,7 @@ def _make_stream( retry: Union[retries.Retry, None, object] = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[List[AggregationResult], Any, Optional[ExplainMetrics]]: """Internal method for stream(). Runs the aggregation query. 
@@ -166,10 +168,10 @@ def _make_stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. - read_time (Optional[datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, - or if Point-in-Time Recovery is enabled, can additionally be a whole minute - timestamp within the past 7 days. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: List[AggregationResult]: @@ -224,7 +226,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[List[AggregationResult]]: """Runs the aggregation query. @@ -248,10 +250,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. - read_time (Optional[datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, - or if Point-in-Time Recovery is enabled, can additionally be a whole minute - timestamp within the past 7 days. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: `StreamGenerator[List[AggregationResult]]`: diff --git a/google/cloud/firestore_v1/base_aggregation.py b/google/cloud/firestore_v1/base_aggregation.py index b49b435185..d99c371094 100644 --- a/google/cloud/firestore_v1/base_aggregation.py +++ b/google/cloud/firestore_v1/base_aggregation.py @@ -21,8 +21,9 @@ from __future__ import annotations import abc +import datetime + from abc import ABC -from datetime import datetime from typing import TYPE_CHECKING, Any, Coroutine, List, Optional, Tuple, Union from google.api_core import gapic_v1 @@ -206,7 +207,7 @@ def _prep_stream( retry: Union[retries.Retry, retries.AsyncRetry, None, object] = None, timeout: float | None = None, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: parent_path, expected_prefix = self._collection_ref._parent_info() request = { @@ -232,7 +233,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( QueryResultsList[AggregationResult] | Coroutine[Any, Any, List[List[AggregationResult]]] @@ -258,10 +259,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. 
- read_time (Optional[datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, - or if Point-in-Time Recovery is enabled, can additionally be a whole minute - timestamp within the past 7 days. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: (QueryResultsList[List[AggregationResult]] | Coroutine[Any, Any, List[List[AggregationResult]]]): @@ -279,7 +280,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( StreamGenerator[List[AggregationResult]] | AsyncStreamGenerator[List[AggregationResult]] @@ -301,10 +302,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. - read_time (Optional[datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, - or if Point-in-Time Recovery is enabled, can additionally be a whole minute - timestamp within the past 7 days. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. 
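+
+        For example, results can be read as they were a few minutes ago
+        (a sketch; assumes an existing synchronous ``aggregation_query``)::
+
+            from datetime import datetime, timedelta, timezone
+
+            read_time = datetime.now(tz=timezone.utc) - timedelta(minutes=5)
+            for results in aggregation_query.stream(read_time=read_time):
+                print(results)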
Returns: StreamGenerator[List[AggregationResult]] | AsyncStreamGenerator[List[AggregationResult]]: diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index 17734214dc..c8504f39db 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -25,8 +25,8 @@ """ from __future__ import annotations +import datetime import os -from datetime import datetime from typing import ( Any, AsyncGenerator, @@ -438,7 +438,7 @@ def _prep_get_all( transaction: BaseTransaction | None = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, - read_time: datetime | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict, dict]: """Shared setup for async/sync :meth:`get_all`.""" document_paths, reference_map = _reference_info(references) @@ -463,7 +463,7 @@ def get_all( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, *, - read_time: datetime | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Union[ AsyncGenerator[DocumentSnapshot, Any], Generator[DocumentSnapshot, Any, Any] ]: @@ -473,7 +473,7 @@ def _prep_collections( self, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, - read_time: datetime | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" request = { @@ -490,7 +490,7 @@ def collections( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, *, - read_time: datetime | None = None, + read_time: Optional[datetime.datetime] = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py index b27f87e108..c3411d3c2e 100644 --- a/google/cloud/firestore_v1/base_collection.py +++ b/google/cloud/firestore_v1/base_collection.py @@ -15,9 +15,9 @@ """Classes for representing collections for the Google Cloud Firestore API.""" from __future__ import annotations +import datetime import random -from datetime import datetime from typing import ( TYPE_CHECKING, Any, @@ -204,7 +204,7 @@ def _prep_list_documents( page_size: Optional[int] = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async / sync :method:`list_documents`""" parent, _ = self._parent_info() @@ -230,7 +230,7 @@ def list_documents( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Union[ Generator[DocumentReference, Any, Any], AsyncGenerator[DocumentReference, Any] ]: @@ -504,7 +504,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( QueryResultsList[DocumentSnapshot] | Coroutine[Any, Any, QueryResultsList[DocumentSnapshot]] @@ -518,7 +518,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[DocumentSnapshot] | AsyncIterator[DocumentSnapshot]: raise 
NotImplementedError diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index 66de2d1bc7..921c3aab7e 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -16,6 +16,8 @@ from __future__ import annotations import copy +import datetime + from typing import ( TYPE_CHECKING, Any, @@ -26,7 +28,6 @@ Union, Awaitable, ) -from datetime import datetime from google.api_core import retry as retries @@ -291,7 +292,7 @@ def _prep_batch_get( transaction=None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, - read_time: datetime | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`get`.""" if isinstance(field_paths, str): @@ -321,7 +322,7 @@ def get( retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> "DocumentSnapshot" | Awaitable["DocumentSnapshot"]: raise NotImplementedError @@ -330,7 +331,7 @@ def _prep_collections( page_size: int | None = None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, - read_time: datetime | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" request = { @@ -349,7 +350,7 @@ def collections( retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 32d16233fc..671fd09e1b 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -22,9 +22,10 @@ import abc import copy +import datetime import math import warnings -from datetime import datetime + from typing import ( TYPE_CHECKING, Any, @@ -1032,7 +1033,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( QueryResultsList[DocumentSnapshot] | Coroutine[Any, Any, QueryResultsList[DocumentSnapshot]] @@ -1045,7 +1046,7 @@ def _prep_stream( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, str, dict]: """Shared setup for async / sync :meth:`stream`""" if self._limit_to_last: @@ -1075,7 +1076,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( StreamGenerator[document.DocumentSnapshot] | AsyncStreamGenerator[DocumentSnapshot] @@ -1432,7 +1433,7 @@ def _prep_get_partitions( partition_count, retry: retries.Retry | object | None = None, timeout: float | None = None, - read_time: datetime | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: self._validate_partition_query() parent_path, expected_prefix = self._parent._parent_info() @@ -1461,7 +1462,7 @@ def get_partitions( retry: Optional[retries.Retry] = None, timeout: 
Optional[float] = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_transaction.py b/google/cloud/firestore_v1/base_transaction.py index 2e21ead37c..303131312b 100644 --- a/google/cloud/firestore_v1/base_transaction.py +++ b/google/cloud/firestore_v1/base_transaction.py @@ -15,7 +15,8 @@ """Helpers for applying Google Cloud Firestore changes in a transaction.""" from __future__ import annotations -from datetime import datetime +import datetime + from typing import ( TYPE_CHECKING, Any, @@ -150,7 +151,7 @@ def get_all( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( Generator[DocumentSnapshot, Any, None] | Coroutine[Any, Any, AsyncGenerator[DocumentSnapshot, Any]] @@ -164,7 +165,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( StreamGenerator[DocumentSnapshot] | Generator[DocumentSnapshot, Any, None] diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py index 5de5dc6aec..efe3f29bd2 100644 --- a/google/cloud/firestore_v1/client.py +++ b/google/cloud/firestore_v1/client.py @@ -25,7 +25,8 @@ """ from __future__ import annotations -from datetime import datetime +import datetime + from typing import TYPE_CHECKING, Any, Generator, Iterable, List, Optional, Union from google.api_core import gapic_v1 @@ -202,12 +203,12 @@ def document(self, *document_path: str) -> DocumentReference: def get_all( self, references: list, - field_paths: Iterable[str] = None, - transaction: Transaction = None, + field_paths: Iterable[str] | None = None, + transaction: Transaction | None = None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: float | None = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[DocumentSnapshot, Any, None]: """Retrieve a batch of documents. @@ -243,9 +244,9 @@ def get_all( timeout (float): The timeout for this request. Defaults to a system-specified value. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: .DocumentSnapshot: The next document snapshot that fulfills the @@ -269,7 +270,7 @@ def collections( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[Any, Any, None]: """List top-level collections of the client's database. @@ -279,9 +280,9 @@ def collections( timeout (float): The timeout for this request. Defaults to a system-specified value. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. 
This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]: diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py index 204ee05363..e37f6ad0fc 100644 --- a/google/cloud/firestore_v1/collection.py +++ b/google/cloud/firestore_v1/collection.py @@ -15,7 +15,8 @@ """Classes for representing collections for the Google Cloud Firestore API.""" from __future__ import annotations -from datetime import datetime +import datetime + from typing import TYPE_CHECKING, Any, Callable, Generator, Optional, Tuple, Union from google.api_core import gapic_v1 @@ -139,7 +140,7 @@ def list_documents( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: Union[float, None] = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[Any, Any, None]: """List all subdocuments of the current collection. @@ -152,9 +153,9 @@ def list_documents( timeout (float): The timeout for this request. Defaults to a system-specified value. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.DocumentReference`]: @@ -183,7 +184,7 @@ def get( timeout: Union[float, None] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in this collection. @@ -203,9 +204,9 @@ def get( Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. If a ``transaction`` is used and it already has write operations added, this method cannot be used (i.e. 
read-after-write is not @@ -230,7 +231,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[DocumentSnapshot]: """Read the documents in this collection. @@ -263,9 +264,9 @@ def stream( Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: `StreamGenerator[DocumentSnapshot]`: A generator of the query results. diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index 436bec0665..c8248b9b89 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -390,9 +390,9 @@ def get( timeout (float): The timeout for this request. Defaults to a system-specified value. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: :class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot`: @@ -456,9 +456,9 @@ def collections( timeout (float): The timeout for this request. Defaults to a system-specified value. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. 
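+
+        For example (a sketch; assumes an existing ``document`` reference)::
+
+            from datetime import datetime, timedelta, timezone
+
+            snapshot_time = datetime.now(tz=timezone.utc) - timedelta(minutes=5)
+            for collection in document.collections(read_time=snapshot_time):
+                print(collection.id)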
Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]: diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index 859c780e49..ad4f89834e 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -20,7 +20,8 @@ """ from __future__ import annotations -from datetime import datetime +import datetime + from typing import TYPE_CHECKING, Any, Callable, Generator, List, Optional, Type from google.api_core import exceptions, gapic_v1 @@ -142,7 +143,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -363,7 +364,7 @@ def _make_stream( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[DocumentSnapshot, Any, Optional[ExplainMetrics]]: """Internal method for stream(). Read the documents in the collection that match this query. @@ -465,7 +466,7 @@ def stream( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -604,7 +605,7 @@ def get_partitions( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[QueryPartition, None, None]: """Partition a query for parallelization. diff --git a/google/cloud/firestore_v1/transaction.py b/google/cloud/firestore_v1/transaction.py index 05b84daa0a..76c50499c1 100644 --- a/google/cloud/firestore_v1/transaction.py +++ b/google/cloud/firestore_v1/transaction.py @@ -15,7 +15,8 @@ """Helpers for applying Google Cloud Firestore changes in a transaction.""" from __future__ import annotations -from datetime import datetime +import datetime + from typing import TYPE_CHECKING, Any, Callable, Generator, Optional from google.api_core import exceptions, gapic_v1 @@ -156,7 +157,7 @@ def get_all( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[DocumentSnapshot, Any, None]: """Retrieves multiple documents from Firestore. @@ -167,10 +168,10 @@ def get_all( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. - read_time (Optional[datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, - or if Point-in-Time Recovery is enabled, can additionally be a whole minute - timestamp within the past 7 days. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. 
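+
+        For example (a sketch; assumes a ``transaction``, document references
+        ``ref1`` and ``ref2``, and a valid ``read_time``)::
+
+            for snapshot in transaction.get_all([ref1, ref2], read_time=read_time):
+                print(snapshot.id, snapshot.to_dict())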
Yields: .DocumentSnapshot: The next document snapshot that fulfills the @@ -188,7 +189,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, - read_time: Optional[datetime] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[DocumentSnapshot] | Generator[DocumentSnapshot, Any, None]: """Retrieve a document or a query result from the database. @@ -205,9 +206,9 @@ def get( explain_metrics will be available on the returned generator. Can only be used when running a query, not a document reference. read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given - time. This must be a microsecond precision timestamp within the past one hour, or - if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp - within the past 7 days. For the most accurate results, use UTC timezone. + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: .DocumentSnapshot: The next document snapshot that fulfills the diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 205b39f35a..f8e4126df6 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -3429,7 +3429,7 @@ def in_transaction(transaction): @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_query_in_transaction_with_read_time(client, cleanup, database): """ - Test query profiling in transactions. + Test read_time in transactions. """ collection_id = "doc-create" + UNIQUE_RESOURCE_ID doc_ids = [f"doc{i}" + UNIQUE_RESOURCE_ID for i in range(5)] @@ -3440,6 +3440,8 @@ def test_query_in_transaction_with_read_time(client, cleanup, database): doc_refs[1].create({"a": 1, "b": 1}) read_time = max(docref.get().read_time for docref in doc_refs) + past_read_time = read_time + datetime.timedelta(days=-10) + future_read_time = read_time + datetime.timedelta(days=10) doc_refs[2].create({"a": 1, "b": 3}) collection = client.collection(collection_id) @@ -3465,6 +3467,15 @@ def in_transaction(transaction): assert 2 in new_b_values assert 3 in new_b_values + # Read times too far in the past fail with either InvalidArgument + # if the read_time dates before the DB was created, or FailedPrecondition + # otherwise. 
+        with pytest.raises((InvalidArgument, FailedPrecondition)):
+            [docs.get("b") for docs in transaction.get(query, read_time=past_read_time)]
+
+        with pytest.raises(InvalidArgument):
+            [docs.get("b") for docs in transaction.get(query, read_time=future_read_time)]
+
         inner_fn_ran = True
 
     in_transaction(transaction)
 
 
diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py
index 236526e641..c1039e2335 100644
--- a/tests/unit/v1/test_aggregation.py
+++ b/tests/unit/v1/test_aggregation.py
@@ -26,6 +26,8 @@
 from google.cloud.firestore_v1.query_profile import ExplainMetrics, QueryExplainError
 from google.cloud.firestore_v1.query_results import QueryResultsList
 from google.cloud.firestore_v1.stream_generator import StreamGenerator
+from google.cloud.firestore_v1.types import RunAggregationQueryRequest, RunAggregationQueryResponse
+from google.protobuf.timestamp_pb2 import Timestamp
 from tests.unit.v1._test_helpers import (
     make_aggregation_query,
     make_aggregation_query_response,
@@ -384,6 +386,73 @@ def test_aggregation_query_prep_stream_with_explain_options():
     assert kwargs == {"retry": None}
 
 
+def test_aggregation_query_prep_stream_with_read_time():
+    client = make_client()
+    parent = client.collection("dee")
+    query = make_query(parent)
+    aggregation_query = make_aggregation_query(query)
+
+    aggregation_query.count(alias="all")
+    aggregation_query.sum("someref", alias="sumall")
+    aggregation_query.avg("anotherref", alias="avgall")
+
+    # Any valid datetime works here; _prep_stream should pass it through unchanged.
+    read_time = datetime.now()
+
+    request, kwargs = aggregation_query._prep_stream(read_time=read_time)
+
+    parent_path, _ = parent._parent_info()
+    expected_request = {
+        "parent": parent_path,
+        "structured_aggregation_query": aggregation_query._to_protobuf(),
+        "transaction": None,
+        "read_time": read_time,
+    }
+    assert request == expected_request
+    assert kwargs == {"retry": None}
+
+
+@pytest.mark.parametrize(
+    "timezone", [None, timezone.utc, timezone(timedelta(hours=5))]
+)
+def test_aggregation_query_get_stream_iterator_read_time_different_timezones(timezone):
+    client = make_client()
+    parent = client.collection("dee")
+    query = make_query(parent)
+    aggregation_query = make_aggregation_query(query)
+
+    aggregation_query.count(alias="all")
+    aggregation_query.sum("someref", alias="sumall")
+    aggregation_query.avg("anotherref", alias="avgall")
+
+    # 1800 seconds after epoch
+    read_time = datetime(1970, 1, 1, 0, 30)
+    if timezone is not None:
+        read_time = read_time.astimezone(timezone)
+
+    # The internal firestore API needs to be initialized before it gets mocked.
+    client._firestore_api
+
+    # Validate that the same timestamp_pb object would be sent in the actual request.
+    with mock.patch.object(
+        type(client._firestore_api_internal.transport.run_aggregation_query), "__call__"
+    ) as call:
+        call.return_value = iter([RunAggregationQueryResponse()])
+        aggregation_query._get_stream_iterator(
+            transaction=None,
+            retry=None,
+            timeout=None,
+            read_time=read_time
+        )
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request_read_time = args[0].read_time
+
+        # Verify that the timestamp is correct.
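+        # 1970-01-01 00:30 UTC is 1800 seconds after the Unix epoch.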
+ expected_timestamp = Timestamp(seconds=1800) + assert request_read_time.timestamp_pb() == expected_timestamp + + def _aggregation_query_get_helper( retry=None, timeout=None, From f19995edff1c23dde708c35ef2598cc8ca32408a Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 14 Feb 2025 18:11:31 +0000 Subject: [PATCH 13/37] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/system/test_system.py | 12 +++++++++--- tests/unit/v1/test_aggregation.py | 16 +++++++--------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index f8e4126df6..386bc5deb2 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -3471,10 +3471,16 @@ def in_transaction(transaction): # if the read_time dates before the DB was created, or FailedPrecondition # otherwise. with pytest.raises((InvalidArgument, FailedPrecondition)): - [docs.get("b") for docs in transaction.get(query, read_time=past_read_time)] - + [ + docs.get("b") + for docs in transaction.get(query, read_time=past_read_time) + ] + with pytest.raises(InvalidArgument): - [docs.get("b") for docs in transaction.get(query, read_time=future_read_time)] + [ + docs.get("b") + for docs in transaction.get(query, read_time=future_read_time) + ] inner_fn_ran = True diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index c1039e2335..c40175a148 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -26,7 +26,10 @@ from google.cloud.firestore_v1.query_profile import ExplainMetrics, QueryExplainError from google.cloud.firestore_v1.query_results import QueryResultsList from google.cloud.firestore_v1.stream_generator import StreamGenerator -from google.cloud.firestore_v1.types import RunAggregationQueryRequest, RunAggregationQueryResponse +from google.cloud.firestore_v1.types import ( + RunAggregationQueryRequest, + RunAggregationQueryResponse, +) from google.protobuf.timestamp_pb2 import Timestamp from tests.unit.v1._test_helpers import ( make_aggregation_query, @@ -412,9 +415,7 @@ def test_aggregation_query_prep_stream_with_read_time(): assert kwargs == {"retry": None} -@pytest.mark.parametrize( - "timezone", [None, timezone.utc, timezone(timedelta(hours=5))] -) +@pytest.mark.parametrize("timezone", [None, timezone.utc, timezone(timedelta(hours=5))]) def test_aggregation_query_get_stream_iterator_read_time_different_timezones(timezone): client = make_client() parent = client.collection("dee") @@ -429,7 +430,7 @@ def test_aggregation_query_get_stream_iterator_read_time_different_timezones(tim read_time = datetime(1970, 1, 1, 0, 30) if timezone is not None: read_time = read_time.astimezone(timezone) - + # The internal firestore API needs to be initialized before it gets mocked. 
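     # Accessing the property caches the GAPIC client as client._firestore_api_internal,
     # whose transport method is patched below.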
client._firestore_api @@ -439,10 +440,7 @@ def test_aggregation_query_get_stream_iterator_read_time_different_timezones(tim ) as call: call.return_value = iter([RunAggregationQueryResponse()]) aggregation_query._get_stream_iterator( - transaction=None, - retry=None, - timeout=None, - read_time=read_time + transaction=None, retry=None, timeout=None, read_time=read_time ) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] From 7f99200d61268ba6e8d6591c26ba2b62b06c4cfd Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Fri, 14 Feb 2025 18:24:14 +0000 Subject: [PATCH 14/37] linting --- tests/unit/v1/test_aggregation.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index c40175a148..e13c010c37 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -26,10 +26,7 @@ from google.cloud.firestore_v1.query_profile import ExplainMetrics, QueryExplainError from google.cloud.firestore_v1.query_results import QueryResultsList from google.cloud.firestore_v1.stream_generator import StreamGenerator -from google.cloud.firestore_v1.types import ( - RunAggregationQueryRequest, - RunAggregationQueryResponse, -) +from google.cloud.firestore_v1.types import RunAggregationQueryResponse from google.protobuf.timestamp_pb2 import Timestamp from tests.unit.v1._test_helpers import ( make_aggregation_query, From 6f5b1c467339a4f9a710c27d507105d758107e1e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 16 Jan 2025 12:07:20 -0800 Subject: [PATCH 15/37] chore: update protoplus for python 3.13 (#1009) --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index b779b4ce18..635d95eb48 100644 --- a/setup.py +++ b/setup.py @@ -37,6 +37,7 @@ "google-cloud-core >= 1.4.1, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", "protobuf>=3.20.2,<6.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {} From 0652cbcab5532bacca25a6f8b27ba31746736d42 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 15:22:51 -0800 Subject: [PATCH 16/37] fix: bump default deadline on CreateDatabase and RestoreDatabase to 2 minutes (#975) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.19.1 PiperOrigin-RevId: 684571179 Source-Link: https://github.com/googleapis/googleapis/commit/fbdc238931e0a7a95c0f55e0cd3ad9e3de2535c8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3a2cdcfb80c2d0f5ec0cc663c2bab0a9486229d0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiM2EyY2RjZmI4MGMyZDBmNWVjMGNjNjYzYzJiYWIwYTk0ODYyMjlkMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Configure Ruby clients for google-ads-ad_manager PiperOrigin-RevId: 689139590 Source-Link: https://github.com/googleapis/googleapis/commit/296f2ac1aa9abccb7708b639b7839faa1809087f Source-Link: https://github.com/googleapis/googleapis-gen/commit/26927362e0aa1293258fc23fe3ce83c5c21d5fbb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjY5MjczNjJlMGFhMTI5MzI1OGZjMjNmZTNjZTgzYzVjMjFkNWZiYiJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: remove body selector from http rule PiperOrigin-RevId: 693215877 Source-Link: https://github.com/googleapis/googleapis/commit/bb6b53e326ce2db403d18be7158c265e07948920 Source-Link: https://github.com/googleapis/googleapis-gen/commit/db8b5a93484ad44055b2bacc4c7cf87e970fe0ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGI4YjVhOTM0ODRhZDQ0MDU1YjJiYWNjNGM3Y2Y4N2U5NzBmZTBlZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add support for opt-in debug logging fix: Fix typing issue with gRPC metadata when key ends in -bin chore: Update gapic-generator-python to v1.21.0 PiperOrigin-RevId: 705285820 Source-Link: https://github.com/googleapis/googleapis/commit/f9b8b9150f7fcd600b0acaeef91236b1843f5e49 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca1e0a1e472d6e6f5de883a5cb54724f112ce348 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2ExZTBhMWU0NzJkNmU2ZjVkZTg4M2E1Y2I1NDcyNGYxMTJjZTM0OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: bump default deadline on CreateDatabase and RestoreDatabase to 2 minutes feat: add filter argument to FirestoreAdmin.ListBackupsRequest PiperOrigin-RevId: 716763143 Source-Link: https://github.com/googleapis/googleapis/commit/3776db131e34e42ec8d287203020cb4282166aa5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/10db5ac476a94aa4c9e0a24946d9fa1b7ea456f6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTBkYjVhYzQ3NmE5NGFhNGM5ZTBhMjQ5NDZkOWZhMWI3ZWE0NTZmNiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .../services/firestore_admin/async_client.py | 280 +- .../services/firestore_admin/client.py | 325 +- .../services/firestore_admin/pagers.py | 32 +- .../firestore_admin/transports/README.rst | 9 + .../firestore_admin/transports/base.py | 24 +- .../firestore_admin/transports/grpc.py | 150 +- .../transports/grpc_asyncio.py | 232 +- .../firestore_admin/transports/rest.py | 3967 +++-- .../firestore_admin/transports/rest_base.py | 1437 ++ .../types/firestore_admin.py | 19 + .../services/firestore/async_client.py | 216 +- .../firestore_v1/services/firestore/client.py | 261 +- .../firestore_v1/services/firestore/pagers.py | 48 +- .../services/firestore/transports/README.rst | 9 + .../services/firestore/transports/base.py | 20 + .../services/firestore/transports/grpc.py | 130 +- .../firestore/transports/grpc_asyncio.py | 194 +- .../services/firestore/transports/rest.py | 2560 +++- .../firestore/transports/rest_base.py | 1004 ++ scripts/fixup_firestore_admin_v1_keywords.py | 2 +- .../test_firestore_admin.py | 12150 ++++++++-------- .../unit/gapic/firestore_v1/test_firestore.py | 6584 +++++---- 22 files changed, 18125 insertions(+), 11528 deletions(-) create mode 100644 google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst create mode 100644 google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py create mode 100644 google/cloud/firestore_v1/services/firestore/transports/README.rst create mode 100644 google/cloud/firestore_v1/services/firestore/transports/rest_base.py diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py 
b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py index db6037da34..6168934029 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -66,6 +67,15 @@ from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport from .client import FirestoreAdminClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class FirestoreAdminAsyncClient: """The Cloud Firestore Admin API. @@ -320,6 +330,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.firestore.admin_v1.FirestoreAdminAsyncClient`.", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "credentialsType": None, + }, + ) + async def create_index( self, request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, @@ -328,7 +360,7 @@ async def create_index( index: Optional[gfa_index.Index] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation] @@ -387,8 +419,10 @@ async def sample_create_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -461,7 +495,7 @@ async def list_indexes( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListIndexesAsyncPager: r"""Lists composite indexes. @@ -506,8 +540,10 @@ async def sample_list_indexes(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesAsyncPager: @@ -582,7 +618,7 @@ async def get_index( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Gets a composite index. @@ -626,8 +662,10 @@ async def sample_get_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.Index: @@ -689,7 +727,7 @@ async def delete_index( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a composite index. @@ -730,8 +768,10 @@ async def sample_delete_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -783,7 +823,7 @@ async def get_field( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> field.Field: r"""Gets the metadata and configuration for a Field. @@ -827,8 +867,10 @@ async def sample_get_field(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.firestore_admin_v1.types.Field: @@ -892,7 +934,7 @@ async def update_field( field: Optional[gfa_field.Field] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Updates a field configuration. Currently, field updates apply only to single field index configuration. However, calls to @@ -956,8 +998,10 @@ async def sample_update_field(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1035,7 +1079,7 @@ async def list_fields( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListFieldsAsyncPager: r"""Lists the field configuration and metadata for this database. @@ -1088,8 +1132,10 @@ async def sample_list_fields(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsAsyncPager: @@ -1164,7 +1210,7 @@ async def export_documents( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Exports a copy of all or a subset of documents from Google Cloud Firestore to another storage system, such @@ -1226,8 +1272,10 @@ async def sample_export_documents(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -1298,7 +1346,7 @@ async def import_documents( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Imports documents into Google Cloud Firestore. Existing documents with the same name are overwritten. @@ -1352,8 +1400,10 @@ async def sample_import_documents(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1434,7 +1484,7 @@ async def bulk_delete_documents( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Bulk deletes a subset of documents from Google Cloud Firestore. Documents created or updated after the @@ -1499,8 +1549,10 @@ async def sample_bulk_delete_documents(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1573,7 +1625,7 @@ async def create_database( database_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Create a database. @@ -1642,8 +1694,10 @@ async def sample_create_database(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -1719,7 +1773,7 @@ async def get_database( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> database.Database: r"""Gets information about a database. @@ -1763,8 +1817,10 @@ async def sample_get_database(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.Database: @@ -1823,7 +1879,7 @@ async def list_databases( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListDatabasesResponse: r"""List all the databases in the project. @@ -1868,8 +1924,10 @@ async def sample_list_databases(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.ListDatabasesResponse: @@ -1929,7 +1987,7 @@ async def update_database( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Updates a database. @@ -1979,8 +2037,10 @@ async def sample_update_database(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2056,7 +2116,7 @@ async def delete_database( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a database. 
@@ -2104,8 +2164,10 @@ async def sample_delete_database(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2177,7 +2239,7 @@ async def get_backup( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> backup.Backup: r"""Gets information about a backup. @@ -2223,8 +2285,10 @@ async def sample_get_backup(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.Backup: @@ -2288,7 +2352,7 @@ async def list_backups( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListBackupsResponse: r"""Lists all the backups. @@ -2336,8 +2400,10 @@ async def sample_list_backups(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.ListBackupsResponse: @@ -2398,7 +2464,7 @@ async def delete_backup( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a backup. @@ -2441,8 +2507,10 @@ async def sample_delete_backup(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2493,7 +2561,7 @@ async def restore_database( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new database by restoring from an existing backup. @@ -2554,8 +2622,10 @@ async def sample_restore_database(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2616,7 +2686,7 @@ async def create_backup_schedule( backup_schedule: Optional[schedule.BackupSchedule] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Creates a backup schedule on a database. At most two backup schedules can be configured on a @@ -2671,8 +2741,10 @@ async def sample_create_backup_schedule(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.BackupSchedule: @@ -2739,7 +2811,7 @@ async def get_backup_schedule( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Gets information about a backup schedule. @@ -2785,8 +2857,10 @@ async def sample_get_backup_schedule(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.firestore_admin_v1.types.BackupSchedule: @@ -2853,7 +2927,7 @@ async def list_backup_schedules( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListBackupSchedulesResponse: r"""List backup schedules. @@ -2898,8 +2972,10 @@ async def sample_list_backup_schedules(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: @@ -2963,7 +3039,7 @@ async def update_backup_schedule( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Updates a backup schedule. @@ -3011,8 +3087,10 @@ async def sample_update_backup_schedule(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.BackupSchedule: @@ -3083,7 +3161,7 @@ async def delete_backup_schedule( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a backup schedule. @@ -3126,8 +3204,10 @@ async def sample_delete_backup_schedule(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -3178,7 +3258,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -3189,8 +3269,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -3203,11 +3285,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -3235,7 +3313,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3246,8 +3324,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3260,11 +3340,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3292,7 +3368,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -3308,8 +3384,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3321,11 +3399,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3350,7 +3424,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -3365,8 +3439,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3378,11 +3454,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index 2b4fa5890c..9791ef7c69 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -14,6 +14,7 @@ # limitations under the License. 
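The recurring `# Certain fields should be provided within the metadata header` comment above refers to the `x-goog-request-params` routing header that GAPIC clients append before each RPC. A rough sketch of that pattern, assuming the standard `google.api_core` helper; `request` and `metadata` stand in for the local variables in each method body:

from google.api_core.gapic_v1 import routing_header

# Append the routing header so the backend can route on the resource name.
metadata = tuple(metadata) + (
    routing_header.to_grpc_metadata((("name", request.name),)),
)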
# from collections import OrderedDict +import logging as std_logging import os import re from typing import ( @@ -48,6 +49,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.firestore_admin_v1.services.firestore_admin import pagers @@ -656,36 +666,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -695,13 +675,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FirestoreAdminClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -807,6 +783,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -872,6 +852,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.firestore.admin_v1.FirestoreAdminClient`.", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "credentialsType": None, + }, + ) + def create_index( self, request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, @@ -880,7 +883,7 @@ def create_index( index: Optional[gfa_index.Index] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation] @@ -939,8 +942,10 @@ def sample_create_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1010,7 +1015,7 @@ def list_indexes( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListIndexesPager: r"""Lists composite indexes. @@ -1055,8 +1060,10 @@ def sample_list_indexes(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesPager: @@ -1128,7 +1135,7 @@ def get_index( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Gets a composite index. 
@@ -1172,8 +1179,10 @@ def sample_get_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.Index: @@ -1232,7 +1241,7 @@ def delete_index( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a composite index. @@ -1273,8 +1282,10 @@ def sample_delete_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1323,7 +1334,7 @@ def get_field( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> field.Field: r"""Gets the metadata and configuration for a Field. @@ -1367,8 +1378,10 @@ def sample_get_field(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.Field: @@ -1429,7 +1442,7 @@ def update_field( field: Optional[gfa_field.Field] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Updates a field configuration. Currently, field updates apply only to single field index configuration. However, calls to @@ -1493,8 +1506,10 @@ def sample_update_field(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1569,7 +1584,7 @@ def list_fields( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListFieldsPager: r"""Lists the field configuration and metadata for this database. @@ -1622,8 +1637,10 @@ def sample_list_fields(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsPager: @@ -1695,7 +1712,7 @@ def export_documents( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Exports a copy of all or a subset of documents from Google Cloud Firestore to another storage system, such @@ -1757,8 +1774,10 @@ def sample_export_documents(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1826,7 +1845,7 @@ def import_documents( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Imports documents into Google Cloud Firestore. Existing documents with the same name are overwritten. @@ -1880,8 +1899,10 @@ def sample_import_documents(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation.Operation: @@ -1959,7 +1980,7 @@ def bulk_delete_documents( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Bulk deletes a subset of documents from Google Cloud Firestore. Documents created or updated after the @@ -2024,8 +2045,10 @@ def sample_bulk_delete_documents(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2095,7 +2118,7 @@ def create_database( database_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Create a database. @@ -2164,8 +2187,10 @@ def sample_create_database(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2238,7 +2263,7 @@ def get_database( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> database.Database: r"""Gets information about a database. @@ -2282,8 +2307,10 @@ def sample_get_database(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.Database: @@ -2339,7 +2366,7 @@ def list_databases( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListDatabasesResponse: r"""List all the databases in the project. @@ -2384,8 +2411,10 @@ def sample_list_databases(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.ListDatabasesResponse: @@ -2442,7 +2471,7 @@ def update_database( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Updates a database. @@ -2492,8 +2521,10 @@ def sample_update_database(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2566,7 +2597,7 @@ def delete_database( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Deletes a database. @@ -2614,8 +2645,10 @@ def sample_delete_database(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2684,7 +2717,7 @@ def get_backup( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> backup.Backup: r"""Gets information about a backup. @@ -2730,8 +2763,10 @@ def sample_get_backup(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
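Several of the admin calls in this hunk hand back a `google.api_core.operation.Operation` rather than a direct result. A hedged sketch of consuming one, reusing the client from the earlier sketch; the resource names are placeholders:

# delete_database runs server-side as a long-running operation; result()
# blocks until it completes or the given timeout elapses.
operation = client.delete_database(
    name="projects/my-project/databases/my-database",
)
operation.result(timeout=300)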
Returns: google.cloud.firestore_admin_v1.types.Backup: @@ -2792,7 +2827,7 @@ def list_backups( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListBackupsResponse: r"""Lists all the backups. @@ -2840,8 +2875,10 @@ def sample_list_backups(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.ListBackupsResponse: @@ -2899,7 +2936,7 @@ def delete_backup( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a backup. @@ -2942,8 +2979,10 @@ def sample_delete_backup(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2991,7 +3030,7 @@ def restore_database( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gac_operation.Operation: r"""Creates a new database by restoring from an existing backup. @@ -3052,8 +3091,10 @@ def sample_restore_database(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -3112,7 +3153,7 @@ def create_backup_schedule( backup_schedule: Optional[schedule.BackupSchedule] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Creates a backup schedule on a database. 
At most two backup schedules can be configured on a @@ -3167,8 +3208,10 @@ def sample_create_backup_schedule(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.BackupSchedule: @@ -3232,7 +3275,7 @@ def get_backup_schedule( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Gets information about a backup schedule. @@ -3278,8 +3321,10 @@ def sample_get_backup_schedule(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.BackupSchedule: @@ -3343,7 +3388,7 @@ def list_backup_schedules( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListBackupSchedulesResponse: r"""List backup schedules. @@ -3388,8 +3433,10 @@ def sample_list_backup_schedules(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: @@ -3450,7 +3497,7 @@ def update_backup_schedule( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Updates a backup schedule. @@ -3498,8 +3545,10 @@ def sample_update_backup_schedule(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_admin_v1.types.BackupSchedule: @@ -3567,7 +3616,7 @@ def delete_backup_schedule( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a backup schedule. @@ -3610,8 +3659,10 @@ def sample_delete_backup_schedule(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -3672,7 +3723,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -3683,8 +3734,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -3697,11 +3750,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -3729,7 +3778,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3740,8 +3789,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3754,11 +3805,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3786,7 +3833,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -3802,8 +3849,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3815,11 +3864,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3844,7 +3889,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -3859,8 +3904,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3872,11 +3919,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. 
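The operations-surface hunks above swap per-call `gapic_v1.method.wrap_method(...)` invocations for lookups into the transport's `_wrapped_methods` table, which `_prep_wrapped_messages` populates once at construction time. Schematically, both forms yield the same kind of retry- and timeout-aware callable; `transport` and `client_info` below stand in for the instance attributes used in the real methods:

from google.api_core import gapic_v1

# Old pattern: wrap the bare transport callable at every call site.
rpc = gapic_v1.method.wrap_method(
    transport.cancel_operation,
    default_timeout=None,
    client_info=client_info,
)

# New pattern: fetch the callable wrapped once up front, keyed by the bare
# method, so per-method defaults live in a single place.
rpc = transport._wrapped_methods[transport.cancel_operation]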
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py index 423c43d9f3..3520d07727 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py @@ -68,7 +68,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -82,8 +82,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore_admin.ListIndexesRequest(request) @@ -142,7 +144,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -156,8 +158,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore_admin.ListIndexesRequest(request) @@ -220,7 +224,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -234,8 +238,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore_admin.ListFieldsRequest(request) @@ -294,7 +300,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -308,8 +314,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore_admin.ListFieldsRequest(request) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst new file mode 100644 index 0000000000..ffcad7a891 --- /dev/null +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`FirestoreAdminTransport` is the ABC for all transports. +- public child `FirestoreAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `FirestoreAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseFirestoreAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `FirestoreAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py index 0bbbf2381c..2014cc0cb9 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py @@ -248,7 +248,7 @@ def _prep_wrapped_messages(self, client_info): ), self.create_database: gapic_v1.method.wrap_method( self.create_database, - default_timeout=None, + default_timeout=120.0, client_info=client_info, ), self.get_database: gapic_v1.method.wrap_method( @@ -288,7 +288,7 @@ def _prep_wrapped_messages(self, client_info): ), self.restore_database: gapic_v1.method.wrap_method( self.restore_database, - default_timeout=None, + default_timeout=120.0, client_info=client_info, ), self.create_backup_schedule: gapic_v1.method.wrap_method( @@ -316,6 +316,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py index c53adc1315..9fbddcef35 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
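The grpc.py hunk that follows threads a logging interceptor between the client stubs and the real channel. As a standalone sketch of the underlying gRPC pattern, under the assumption that any unary-unary interceptor works this way; the endpoint and print statements are illustrative:

import grpc

class PrintingInterceptor(grpc.UnaryUnaryClientInterceptor):
    """Log every unary-unary call, then delegate to the wrapped channel."""

    def intercept_unary_unary(self, continuation, client_call_details, request):
        print(f"-> {client_call_details.method}")
        response = continuation(client_call_details, request)
        print(f"<- {client_call_details.method}")
        return response

channel = grpc.intercept_channel(
    grpc.insecure_channel("localhost:50051"),  # illustrative endpoint
    PrintingInterceptor(),
)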
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,8 +25,11 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.firestore_admin_v1.types import backup from google.cloud.firestore_admin_v1.types import database @@ -36,6 +42,81 @@ from google.protobuf import empty_pb2 # type: ignore from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC trailing metadata to a dict of str values for logging + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class FirestoreAdminGrpcTransport(FirestoreAdminTransport): """gRPC backend transport for FirestoreAdmin. @@ -220,7 +301,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -284,7 +370,9 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) # Return the client from cache. return self._operations_client @@ -312,7 +400,7 @@ def create_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_index" not in self._stubs: - self._stubs["create_index"] = self.grpc_channel.unary_unary( + self._stubs["create_index"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", request_serializer=firestore_admin.CreateIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -340,7 +428,7 @@ def list_indexes( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + self._stubs["list_indexes"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes", request_serializer=firestore_admin.ListIndexesRequest.serialize, response_deserializer=firestore_admin.ListIndexesResponse.deserialize, @@ -364,7 +452,7 @@ def get_index(self) -> Callable[[firestore_admin.GetIndexRequest], index.Index]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_index" not in self._stubs: - self._stubs["get_index"] = self.grpc_channel.unary_unary( + self._stubs["get_index"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetIndex", request_serializer=firestore_admin.GetIndexRequest.serialize, response_deserializer=index.Index.deserialize, @@ -390,7 +478,7 @@ def delete_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self.grpc_channel.unary_unary( + self._stubs["delete_index"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", request_serializer=firestore_admin.DeleteIndexRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -414,7 +502,7 @@ def get_field(self) -> Callable[[firestore_admin.GetFieldRequest], field.Field]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_field" not in self._stubs: - self._stubs["get_field"] = self.grpc_channel.unary_unary( + self._stubs["get_field"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetField", request_serializer=firestore_admin.GetFieldRequest.serialize, response_deserializer=field.Field.deserialize, @@ -455,7 +543,7 @@ def update_field( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_field" not in self._stubs: - self._stubs["update_field"] = self.grpc_channel.unary_unary( + self._stubs["update_field"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", request_serializer=firestore_admin.UpdateFieldRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -491,7 +579,7 @@ def list_fields( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_fields" not in self._stubs: - self._stubs["list_fields"] = self.grpc_channel.unary_unary( + self._stubs["list_fields"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListFields", request_serializer=firestore_admin.ListFieldsRequest.serialize, response_deserializer=firestore_admin.ListFieldsResponse.deserialize, @@ -531,7 +619,7 @@ def export_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "export_documents" not in self._stubs: - self._stubs["export_documents"] = self.grpc_channel.unary_unary( + self._stubs["export_documents"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", request_serializer=firestore_admin.ExportDocumentsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -563,7 +651,7 @@ def import_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "import_documents" not in self._stubs: - self._stubs["import_documents"] = self.grpc_channel.unary_unary( + self._stubs["import_documents"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", request_serializer=firestore_admin.ImportDocumentsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -600,7 +688,7 @@ def bulk_delete_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "bulk_delete_documents" not in self._stubs: - self._stubs["bulk_delete_documents"] = self.grpc_channel.unary_unary( + self._stubs["bulk_delete_documents"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/BulkDeleteDocuments", request_serializer=firestore_admin.BulkDeleteDocumentsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -626,7 +714,7 @@ def create_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_database" not in self._stubs: - self._stubs["create_database"] = self.grpc_channel.unary_unary( + self._stubs["create_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase", request_serializer=firestore_admin.CreateDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -652,7 +740,7 @@ def get_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_database" not in self._stubs: - self._stubs["get_database"] = self.grpc_channel.unary_unary( + self._stubs["get_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetDatabase", request_serializer=firestore_admin.GetDatabaseRequest.serialize, response_deserializer=database.Database.deserialize, @@ -680,7 +768,7 @@ def list_databases( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_databases" not in self._stubs: - self._stubs["list_databases"] = self.grpc_channel.unary_unary( + self._stubs["list_databases"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListDatabases", request_serializer=firestore_admin.ListDatabasesRequest.serialize, response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, @@ -706,7 +794,7 @@ def update_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_database" not in self._stubs: - self._stubs["update_database"] = self.grpc_channel.unary_unary( + self._stubs["update_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase", request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -732,7 +820,7 @@ def delete_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_database" not in self._stubs: - self._stubs["delete_database"] = self.grpc_channel.unary_unary( + self._stubs["delete_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase", request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -756,7 +844,7 @@ def get_backup(self) -> Callable[[firestore_admin.GetBackupRequest], backup.Back # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_backup" not in self._stubs: - self._stubs["get_backup"] = self.grpc_channel.unary_unary( + self._stubs["get_backup"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetBackup", request_serializer=firestore_admin.GetBackupRequest.serialize, response_deserializer=backup.Backup.deserialize, @@ -784,7 +872,7 @@ def list_backups( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_backups" not in self._stubs: - self._stubs["list_backups"] = self.grpc_channel.unary_unary( + self._stubs["list_backups"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListBackups", request_serializer=firestore_admin.ListBackupsRequest.serialize, response_deserializer=firestore_admin.ListBackupsResponse.deserialize, @@ -810,7 +898,7 @@ def delete_backup( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_backup" not in self._stubs: - self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + self._stubs["delete_backup"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup", request_serializer=firestore_admin.DeleteBackupRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -854,7 +942,7 @@ def restore_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "restore_database" not in self._stubs: - self._stubs["restore_database"] = self.grpc_channel.unary_unary( + self._stubs["restore_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase", request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -885,7 +973,7 @@ def create_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_backup_schedule" not in self._stubs: - self._stubs["create_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["create_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule", request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, response_deserializer=schedule.BackupSchedule.deserialize, @@ -911,7 +999,7 @@ def get_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_backup_schedule" not in self._stubs: - self._stubs["get_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["get_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule", request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, response_deserializer=schedule.BackupSchedule.deserialize, @@ -940,7 +1028,7 @@ def list_backup_schedules( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_backup_schedules" not in self._stubs: - self._stubs["list_backup_schedules"] = self.grpc_channel.unary_unary( + self._stubs["list_backup_schedules"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules", request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, @@ -968,7 +1056,7 @@ def update_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_backup_schedule" not in self._stubs: - self._stubs["update_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["update_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule", request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, response_deserializer=schedule.BackupSchedule.deserialize, @@ -994,7 +1082,7 @@ def delete_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_backup_schedule" not in self._stubs: - self._stubs["delete_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["delete_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule", request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1002,7 +1090,7 @@ def delete_backup_schedule( return self._stubs["delete_backup_schedule"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -1014,7 +1102,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -1031,7 +1119,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1048,7 +1136,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1067,7 +1155,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py index 31593beb5e..5c0827e21b 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
 #
+import inspect
+import json
+import pickle
+import logging as std_logging
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
 
@@ -23,8 +27,11 @@
 from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
 import grpc  # type: ignore
+import proto  # type: ignore
 from grpc.experimental import aio  # type: ignore
 
 from google.cloud.firestore_admin_v1.types import backup
@@ -39,6 +46,82 @@
 from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO
 from .grpc import FirestoreAdminGrpcTransport
 
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+    grpc.aio.UnaryUnaryClientInterceptor
+):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.firestore.admin.v1.FirestoreAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.firestore.admin.v1.FirestoreAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
 
 class FirestoreAdminGrpcAsyncIOTransport(FirestoreAdminTransport):
    """gRPC AsyncIO backend transport for FirestoreAdmin.
@@ -266,7 +349,13 @@ def __init__(
             ],
         )
 
-        # Wrap messages.
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -289,7 +378,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -320,7 +409,7 @@ def create_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_index" not in self._stubs: - self._stubs["create_index"] = self.grpc_channel.unary_unary( + self._stubs["create_index"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", request_serializer=firestore_admin.CreateIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -349,7 +438,7 @@ def list_indexes( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + self._stubs["list_indexes"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes", request_serializer=firestore_admin.ListIndexesRequest.serialize, response_deserializer=firestore_admin.ListIndexesResponse.deserialize, @@ -375,7 +464,7 @@ def get_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_index" not in self._stubs: - self._stubs["get_index"] = self.grpc_channel.unary_unary( + self._stubs["get_index"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetIndex", request_serializer=firestore_admin.GetIndexRequest.serialize, response_deserializer=index.Index.deserialize, @@ -401,7 +490,7 @@ def delete_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self.grpc_channel.unary_unary( + self._stubs["delete_index"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", request_serializer=firestore_admin.DeleteIndexRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -427,7 +516,7 @@ def get_field( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_field" not in self._stubs: - self._stubs["get_field"] = self.grpc_channel.unary_unary( + self._stubs["get_field"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetField", request_serializer=firestore_admin.GetFieldRequest.serialize, response_deserializer=field.Field.deserialize, @@ -470,7 +559,7 @@ def update_field( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_field" not in self._stubs: - self._stubs["update_field"] = self.grpc_channel.unary_unary( + self._stubs["update_field"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", request_serializer=firestore_admin.UpdateFieldRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -507,7 +596,7 @@ def list_fields( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_fields" not in self._stubs: - self._stubs["list_fields"] = self.grpc_channel.unary_unary( + self._stubs["list_fields"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListFields", request_serializer=firestore_admin.ListFieldsRequest.serialize, response_deserializer=firestore_admin.ListFieldsResponse.deserialize, @@ -549,7 +638,7 @@ def export_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "export_documents" not in self._stubs: - self._stubs["export_documents"] = self.grpc_channel.unary_unary( + self._stubs["export_documents"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", request_serializer=firestore_admin.ExportDocumentsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -583,7 +672,7 @@ def import_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "import_documents" not in self._stubs: - self._stubs["import_documents"] = self.grpc_channel.unary_unary( + self._stubs["import_documents"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", request_serializer=firestore_admin.ImportDocumentsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -621,7 +710,7 @@ def bulk_delete_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "bulk_delete_documents" not in self._stubs: - self._stubs["bulk_delete_documents"] = self.grpc_channel.unary_unary( + self._stubs["bulk_delete_documents"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/BulkDeleteDocuments", request_serializer=firestore_admin.BulkDeleteDocumentsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -649,7 +738,7 @@ def create_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_database" not in self._stubs: - self._stubs["create_database"] = self.grpc_channel.unary_unary( + self._stubs["create_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase", request_serializer=firestore_admin.CreateDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -675,7 +764,7 @@ def get_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_database" not in self._stubs: - self._stubs["get_database"] = self.grpc_channel.unary_unary( + self._stubs["get_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetDatabase", request_serializer=firestore_admin.GetDatabaseRequest.serialize, response_deserializer=database.Database.deserialize, @@ -704,7 +793,7 @@ def list_databases( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_databases" not in self._stubs: - self._stubs["list_databases"] = self.grpc_channel.unary_unary( + self._stubs["list_databases"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListDatabases", request_serializer=firestore_admin.ListDatabasesRequest.serialize, response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, @@ -732,7 +821,7 @@ def update_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_database" not in self._stubs: - self._stubs["update_database"] = self.grpc_channel.unary_unary( + self._stubs["update_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase", request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -760,7 +849,7 @@ def delete_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_database" not in self._stubs: - self._stubs["delete_database"] = self.grpc_channel.unary_unary( + self._stubs["delete_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase", request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -786,7 +875,7 @@ def get_backup( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_backup" not in self._stubs: - self._stubs["get_backup"] = self.grpc_channel.unary_unary( + self._stubs["get_backup"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetBackup", request_serializer=firestore_admin.GetBackupRequest.serialize, response_deserializer=backup.Backup.deserialize, @@ -815,7 +904,7 @@ def list_backups( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_backups" not in self._stubs: - self._stubs["list_backups"] = self.grpc_channel.unary_unary( + self._stubs["list_backups"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListBackups", request_serializer=firestore_admin.ListBackupsRequest.serialize, response_deserializer=firestore_admin.ListBackupsResponse.deserialize, @@ -841,7 +930,7 @@ def delete_backup( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_backup" not in self._stubs: - self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + self._stubs["delete_backup"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup", request_serializer=firestore_admin.DeleteBackupRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -887,7 +976,7 @@ def restore_database( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "restore_database" not in self._stubs: - self._stubs["restore_database"] = self.grpc_channel.unary_unary( + self._stubs["restore_database"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase", request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -919,7 +1008,7 @@ def create_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_backup_schedule" not in self._stubs: - self._stubs["create_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["create_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule", request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, response_deserializer=schedule.BackupSchedule.deserialize, @@ -947,7 +1036,7 @@ def get_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_backup_schedule" not in self._stubs: - self._stubs["get_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["get_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule", request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, response_deserializer=schedule.BackupSchedule.deserialize, @@ -976,7 +1065,7 @@ def list_backup_schedules( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_backup_schedules" not in self._stubs: - self._stubs["list_backup_schedules"] = self.grpc_channel.unary_unary( + self._stubs["list_backup_schedules"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules", request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, @@ -1005,7 +1094,7 @@ def update_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_backup_schedule" not in self._stubs: - self._stubs["update_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["update_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule", request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, response_deserializer=schedule.BackupSchedule.deserialize, @@ -1033,7 +1122,7 @@ def delete_backup_schedule( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_backup_schedule" not in self._stubs: - self._stubs["delete_backup_schedule"] = self.grpc_channel.unary_unary( + self._stubs["delete_backup_schedule"] = self._logged_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule", request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1043,12 +1132,12 @@ def delete_backup_schedule( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.create_index: gapic_v1.method_async.wrap_method( + self.create_index: self._wrap_method( self.create_index, default_timeout=60.0, client_info=client_info, ), - self.list_indexes: gapic_v1.method_async.wrap_method( + self.list_indexes: self._wrap_method( self.list_indexes, default_retry=retries.AsyncRetry( initial=0.1, @@ -1064,7 +1153,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_index: gapic_v1.method_async.wrap_method( + self.get_index: self._wrap_method( self.get_index, default_retry=retries.AsyncRetry( initial=0.1, @@ -1080,7 +1169,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_index: gapic_v1.method_async.wrap_method( + self.delete_index: self._wrap_method( self.delete_index, default_retry=retries.AsyncRetry( initial=0.1, @@ -1096,7 +1185,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_field: gapic_v1.method_async.wrap_method( + self.get_field: self._wrap_method( self.get_field, default_retry=retries.AsyncRetry( initial=0.1, @@ -1112,12 +1201,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.update_field: gapic_v1.method_async.wrap_method( + self.update_field: self._wrap_method( self.update_field, default_timeout=60.0, client_info=client_info, ), - self.list_fields: gapic_v1.method_async.wrap_method( + self.list_fields: self._wrap_method( self.list_fields, default_retry=retries.AsyncRetry( initial=0.1, @@ -1133,95 +1222,124 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.export_documents: gapic_v1.method_async.wrap_method( + self.export_documents: self._wrap_method( self.export_documents, default_timeout=60.0, client_info=client_info, ), - self.import_documents: gapic_v1.method_async.wrap_method( + self.import_documents: self._wrap_method( self.import_documents, default_timeout=60.0, client_info=client_info, ), - self.bulk_delete_documents: gapic_v1.method_async.wrap_method( + self.bulk_delete_documents: self._wrap_method( self.bulk_delete_documents, default_timeout=60.0, client_info=client_info, ), - self.create_database: gapic_v1.method_async.wrap_method( + self.create_database: self._wrap_method( self.create_database, - default_timeout=None, + default_timeout=120.0, client_info=client_info, ), - self.get_database: gapic_v1.method_async.wrap_method( + self.get_database: self._wrap_method( self.get_database, default_timeout=None, client_info=client_info, ), - self.list_databases: gapic_v1.method_async.wrap_method( + self.list_databases: self._wrap_method( self.list_databases, default_timeout=None, client_info=client_info, ), - self.update_database: gapic_v1.method_async.wrap_method( + self.update_database: self._wrap_method( self.update_database, default_timeout=None, 
client_info=client_info, ), - self.delete_database: gapic_v1.method_async.wrap_method( + self.delete_database: self._wrap_method( self.delete_database, default_timeout=None, client_info=client_info, ), - self.get_backup: gapic_v1.method_async.wrap_method( + self.get_backup: self._wrap_method( self.get_backup, default_timeout=None, client_info=client_info, ), - self.list_backups: gapic_v1.method_async.wrap_method( + self.list_backups: self._wrap_method( self.list_backups, default_timeout=None, client_info=client_info, ), - self.delete_backup: gapic_v1.method_async.wrap_method( + self.delete_backup: self._wrap_method( self.delete_backup, default_timeout=None, client_info=client_info, ), - self.restore_database: gapic_v1.method_async.wrap_method( + self.restore_database: self._wrap_method( self.restore_database, - default_timeout=None, + default_timeout=120.0, client_info=client_info, ), - self.create_backup_schedule: gapic_v1.method_async.wrap_method( + self.create_backup_schedule: self._wrap_method( self.create_backup_schedule, default_timeout=None, client_info=client_info, ), - self.get_backup_schedule: gapic_v1.method_async.wrap_method( + self.get_backup_schedule: self._wrap_method( self.get_backup_schedule, default_timeout=None, client_info=client_info, ), - self.list_backup_schedules: gapic_v1.method_async.wrap_method( + self.list_backup_schedules: self._wrap_method( self.list_backup_schedules, default_timeout=None, client_info=client_info, ), - self.update_backup_schedule: gapic_v1.method_async.wrap_method( + self.update_backup_schedule: self._wrap_method( self.update_backup_schedule, default_timeout=None, client_info=client_info, ), - self.delete_backup_schedule: gapic_v1.method_async.wrap_method( + self.delete_backup_schedule: self._wrap_method( self.delete_backup_schedule, default_timeout=None, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" @property def delete_operation( @@ -1233,7 +1351,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -1250,7 +1368,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1267,7 +1385,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1286,7 +1404,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py index 127f42b2a1..ce9048bc86 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py @@ -13,33 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format from google.api_core import operations_v1 from google.cloud.location import locations_pb2 # type: ignore + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.cloud.firestore_admin_v1.types import backup from google.cloud.firestore_admin_v1.types import database @@ -50,16 +43,28 @@ from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from .base import ( - FirestoreAdminTransport, - DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, -) +from .rest_base import _BaseFirestoreAdminRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -267,8 +272,11 @@ def post_update_field(self, response): def pre_bulk_delete_documents( self, request: firestore_admin.BulkDeleteDocumentsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.BulkDeleteDocumentsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.BulkDeleteDocumentsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for bulk_delete_documents Override in a subclass to manipulate the request or metadata @@ -290,8 +298,11 @@ def post_bulk_delete_documents( def pre_create_backup_schedule( self, request: firestore_admin.CreateBackupScheduleRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.CreateBackupScheduleRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.CreateBackupScheduleRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for create_backup_schedule Override in a subclass to manipulate the request or metadata @@ -313,8 +324,10 @@ def post_create_backup_schedule( def pre_create_database( self, request: firestore_admin.CreateDatabaseRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.CreateDatabaseRequest, 
Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_database Override in a subclass to manipulate the request or metadata @@ -336,8 +349,10 @@ def post_create_database( def pre_create_index( self, request: firestore_admin.CreateIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.CreateIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.CreateIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_index Override in a subclass to manipulate the request or metadata @@ -359,8 +374,10 @@ def post_create_index( def pre_delete_backup( self, request: firestore_admin.DeleteBackupRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_backup Override in a subclass to manipulate the request or metadata @@ -371,8 +388,11 @@ def pre_delete_backup( def pre_delete_backup_schedule( self, request: firestore_admin.DeleteBackupScheduleRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.DeleteBackupScheduleRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.DeleteBackupScheduleRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for delete_backup_schedule Override in a subclass to manipulate the request or metadata @@ -383,8 +403,10 @@ def pre_delete_backup_schedule( def pre_delete_database( self, request: firestore_admin.DeleteDatabaseRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.DeleteDatabaseRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.DeleteDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_database Override in a subclass to manipulate the request or metadata @@ -406,8 +428,10 @@ def post_delete_database( def pre_delete_index( self, request: firestore_admin.DeleteIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.DeleteIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.DeleteIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_index Override in a subclass to manipulate the request or metadata @@ -418,8 +442,10 @@ def pre_delete_index( def pre_export_documents( self, request: firestore_admin.ExportDocumentsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.ExportDocumentsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ExportDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for export_documents Override in a subclass to manipulate the request or metadata @@ -441,8 +467,10 @@ def post_export_documents( def pre_get_backup( self, request: firestore_admin.GetBackupRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.GetBackupRequest, Sequence[Tuple[str, str]]]: + metadata: 
Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_backup Override in a subclass to manipulate the request or metadata @@ -462,8 +490,11 @@ def post_get_backup(self, response: backup.Backup) -> backup.Backup: def pre_get_backup_schedule( self, request: firestore_admin.GetBackupScheduleRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.GetBackupScheduleRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.GetBackupScheduleRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for get_backup_schedule Override in a subclass to manipulate the request or metadata @@ -485,8 +516,10 @@ def post_get_backup_schedule( def pre_get_database( self, request: firestore_admin.GetDatabaseRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.GetDatabaseRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_database Override in a subclass to manipulate the request or metadata @@ -506,8 +539,10 @@ def post_get_database(self, response: database.Database) -> database.Database: def pre_get_field( self, request: firestore_admin.GetFieldRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.GetFieldRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.GetFieldRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_field Override in a subclass to manipulate the request or metadata @@ -527,8 +562,10 @@ def post_get_field(self, response: field.Field) -> field.Field: def pre_get_index( self, request: firestore_admin.GetIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.GetIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.GetIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_index Override in a subclass to manipulate the request or metadata @@ -548,8 +585,10 @@ def post_get_index(self, response: index.Index) -> index.Index: def pre_import_documents( self, request: firestore_admin.ImportDocumentsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.ImportDocumentsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ImportDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for import_documents Override in a subclass to manipulate the request or metadata @@ -571,8 +610,10 @@ def post_import_documents( def pre_list_backups( self, request: firestore_admin.ListBackupsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.ListBackupsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_backups Override in a subclass to manipulate the request or metadata @@ -594,8 +635,11 @@ def post_list_backups( def pre_list_backup_schedules( self, request: firestore_admin.ListBackupSchedulesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> 
Tuple[firestore_admin.ListBackupSchedulesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListBackupSchedulesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for list_backup_schedules Override in a subclass to manipulate the request or metadata @@ -617,8 +661,10 @@ def post_list_backup_schedules( def pre_list_databases( self, request: firestore_admin.ListDatabasesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.ListDatabasesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_databases Override in a subclass to manipulate the request or metadata @@ -640,8 +686,10 @@ def post_list_databases( def pre_list_fields( self, request: firestore_admin.ListFieldsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.ListFieldsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListFieldsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_fields Override in a subclass to manipulate the request or metadata @@ -663,8 +711,10 @@ def post_list_fields( def pre_list_indexes( self, request: firestore_admin.ListIndexesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.ListIndexesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListIndexesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_indexes Override in a subclass to manipulate the request or metadata @@ -686,8 +736,10 @@ def post_list_indexes( def pre_restore_database( self, request: firestore_admin.RestoreDatabaseRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.RestoreDatabaseRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.RestoreDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for restore_database Override in a subclass to manipulate the request or metadata @@ -709,8 +761,11 @@ def post_restore_database( def pre_update_backup_schedule( self, request: firestore_admin.UpdateBackupScheduleRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.UpdateBackupScheduleRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.UpdateBackupScheduleRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for update_backup_schedule Override in a subclass to manipulate the request or metadata @@ -732,8 +787,10 @@ def post_update_backup_schedule( def pre_update_database( self, request: firestore_admin.UpdateDatabaseRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore_admin.UpdateDatabaseRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_database Override in a subclass to manipulate the request or metadata @@ -755,8 +812,10 @@ def post_update_database( def pre_update_field( self, request: firestore_admin.UpdateFieldRequest, - metadata: Sequence[Tuple[str, str]], - ) -> 
Tuple[firestore_admin.UpdateFieldRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.UpdateFieldRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_field Override in a subclass to manipulate the request or metadata @@ -778,8 +837,10 @@ def post_update_field( def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -799,8 +860,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -820,8 +883,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -843,8 +908,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -871,8 +938,8 @@ class FirestoreAdminRestStub: _interceptor: FirestoreAdminRestInterceptor -class FirestoreAdminRestTransport(FirestoreAdminTransport): - """REST backend transport for FirestoreAdmin. +class FirestoreAdminRestTransport(_BaseFirestoreAdminRestTransport): + """REST backend synchronous transport for FirestoreAdmin. The Cloud Firestore Admin API. @@ -911,7 +978,6 @@ class FirestoreAdminRestTransport(FirestoreAdminTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -965,21 +1031,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
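# A hedged sketch of the interceptor hooks declared above (the subclass and
# header below are illustrative, not part of the library): override a `pre_*`
# method to inspect or extend the outgoing request metadata before it is sent.
class MetadataAddingInterceptor(FirestoreAdminRestInterceptor):
    def pre_get_database(self, request, metadata):
        # Values stay `str` unless the key ends in "-bin", which takes `bytes`.
        return request, list(metadata) + [("x-example-header", "demo")]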
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(
-                f"Unexpected hostname structure: {host}"
-            )  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
         super().__init__(
             host=host,
             credentials=credentials,
             client_info=client_info,
             always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
             api_audience=api_audience,
         )
         self._session = AuthorizedSession(
@@ -1044,19 +1101,35 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient:
         # Return the client from cache.
         return self._operations_client

-    class _BulkDeleteDocuments(FirestoreAdminRestStub):
+    class _BulkDeleteDocuments(
+        _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments,
+        FirestoreAdminRestStub,
+    ):
         def __hash__(self):
-            return hash("BulkDeleteDocuments")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {
-                k: v
-                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
-                if k not in message_dict
-            }
+            return hash("FirestoreAdminRestTransport.BulkDeleteDocuments")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+            return response

         def __call__(
             self,
@@ -1064,7 +1137,7 @@ def __call__(
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Optional[float] = None,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
         ) -> operations_pb2.Operation:
             r"""Call the bulk delete documents method over HTTP.

             Args:
                 request (~.firestore_admin.BulkDeleteDocumentsRequest):
                     The request object. The request for
                     [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments].

                     When both collection_ids and namespace_ids are set, only
                     documents satisfying both conditions will be deleted.

                     Requests with namespace_ids and collection_ids both empty
                     will be rejected. Please use
                     [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]
                     instead.
                 retry (google.api_core.retry.Retry): Designation of what errors, if any,
                     should be retried.
                 timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
Returns: ~.operations_pb2.Operation: @@ -1094,47 +1169,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*}:bulkDeleteDocuments", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_http_options() + ) + request, metadata = self._interceptor.pre_bulk_delete_documents( request, metadata ) - pb_request = firestore_admin.BulkDeleteDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.BulkDeleteDocuments", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "BulkDeleteDocuments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._BulkDeleteDocuments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1145,22 +1235,60 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_bulk_delete_documents(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.bulk_delete_documents", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "BulkDeleteDocuments", + "metadata": http_response["headers"], + 
"httpResponse": http_response, + }, + ) return resp - class _CreateBackupSchedule(FirestoreAdminRestStub): + class _CreateBackupSchedule( + _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule, + FirestoreAdminRestStub, + ): def __hash__(self): - return hash("CreateBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.CreateBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1168,7 +1296,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Call the create backup schedule method over HTTP. @@ -1179,8 +1307,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.schedule.BackupSchedule: @@ -1193,47 +1323,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*}/backupSchedules", - "body": "backup_schedule", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_http_options() + ) + request, metadata = self._interceptor.pre_create_backup_schedule( request, metadata ) - pb_request = firestore_admin.CreateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CreateBackupSchedule", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "CreateBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._CreateBackupSchedule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1246,24 +1391,59 @@ def __call__( pb_resp = schedule.BackupSchedule.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_schedule(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schedule.BackupSchedule.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.create_backup_schedule", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "CreateBackupSchedule", + "metadata": 
http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _CreateDatabase(FirestoreAdminRestStub): + class _CreateDatabase( + _BaseFirestoreAdminRestTransport._BaseCreateDatabase, FirestoreAdminRestStub + ): def __hash__(self): - return hash("CreateDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "databaseId": "", - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.CreateDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1271,7 +1451,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the create database method over HTTP. @@ -1282,8 +1462,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1293,45 +1475,60 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/databases", - "body": "database", - }, - ] - request, metadata = self._interceptor.pre_create_database(request, metadata) - pb_request = firestore_admin.CreateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_create_database(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CreateDatabase", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "CreateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._CreateDatabase._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1342,22 +1539,59 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_database(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.create_database", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "CreateDatabase", + "metadata": http_response["headers"], + 
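+                        # "payload" above may be None: serialization of the
+                        # message is attempted best-effort, and a failure is
+                        # swallowed so that DEBUG logging of a non-serializable
+                        # message does not break the call itself.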
"httpResponse": http_response, + }, + ) return resp - class _CreateIndex(FirestoreAdminRestStub): + class _CreateIndex( + _BaseFirestoreAdminRestTransport._BaseCreateIndex, FirestoreAdminRestStub + ): def __hash__(self): - return hash("CreateIndex") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.CreateIndex") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1365,7 +1599,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the create index method over HTTP. @@ -1376,8 +1610,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1387,45 +1623,60 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes", - "body": "index", - }, - ] - request, metadata = self._interceptor.pre_create_index(request, metadata) - pb_request = firestore_admin.CreateIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_create_index(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CreateIndex", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "CreateIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._CreateIndex._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1436,22 +1687,58 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_index(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.create_index", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "CreateIndex", + "metadata": http_response["headers"], + "httpResponse": 
http_response, + }, + ) return resp - class _DeleteBackup(FirestoreAdminRestStub): + class _DeleteBackup( + _BaseFirestoreAdminRestTransport._BaseDeleteBackup, FirestoreAdminRestStub + ): def __hash__(self): - return hash("DeleteBackup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.DeleteBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1459,7 +1746,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete backup method over HTTP. @@ -1470,42 +1757,61 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/backups/*}", - }, - ] - request, metadata = self._interceptor.pre_delete_backup(request, metadata) - pb_request = firestore_admin.DeleteBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteBackup", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "DeleteBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._DeleteBackup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1513,19 +1819,34 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteBackupSchedule(FirestoreAdminRestStub): + class _DeleteBackupSchedule( + _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule, + FirestoreAdminRestStub, + ): def __hash__(self): - return hash("DeleteBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.DeleteBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + 
return response def __call__( self, @@ -1533,7 +1854,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete backup schedule method over HTTP. @@ -1544,44 +1865,63 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/backupSchedules/*}", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_backup_schedule( request, metadata ) - pb_request = firestore_admin.DeleteBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteBackupSchedule", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "DeleteBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._DeleteBackupSchedule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1589,19 +1929,33 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteDatabase(FirestoreAdminRestStub): + class _DeleteDatabase( + _BaseFirestoreAdminRestTransport._BaseDeleteDatabase, 
FirestoreAdminRestStub + ): def __hash__(self): - return hash("DeleteDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.DeleteDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1609,7 +1963,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the delete database method over HTTP. @@ -1620,8 +1974,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1631,38 +1987,55 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*}", - }, - ] - request, metadata = self._interceptor.pre_delete_database(request, metadata) - pb_request = firestore_admin.DeleteDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_delete_database(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteDatabase", + extra={ + "serviceName": 
"google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "DeleteDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._DeleteDatabase._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1673,22 +2046,58 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_database(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.delete_database", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "DeleteDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _DeleteIndex(FirestoreAdminRestStub): + class _DeleteIndex( + _BaseFirestoreAdminRestTransport._BaseDeleteIndex, FirestoreAdminRestStub + ): def __hash__(self): - return hash("DeleteIndex") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.DeleteIndex") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1696,7 +2105,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete index method over HTTP. @@ -1707,42 +2116,61 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}", - }, - ] - request, metadata = self._interceptor.pre_delete_index(request, metadata) - pb_request = firestore_admin.DeleteIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_delete_index(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteIndex", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "DeleteIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._DeleteIndex._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1750,19 +2178,34 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _ExportDocuments(FirestoreAdminRestStub): + class _ExportDocuments( + _BaseFirestoreAdminRestTransport._BaseExportDocuments, FirestoreAdminRestStub + ): def __hash__(self): - return hash("ExportDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.ExportDocuments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return 
response def __call__( self, @@ -1770,7 +2213,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the export documents method over HTTP. @@ -1781,8 +2224,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1792,47 +2237,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*}:exportDocuments", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_http_options() + ) + request, metadata = self._interceptor.pre_export_documents( request, metadata ) - pb_request = firestore_admin.ExportDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ExportDocuments", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ExportDocuments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._ExportDocuments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception @@ -1843,22 +2303,58 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_documents(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.export_documents", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ExportDocuments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _GetBackup(FirestoreAdminRestStub): + class _GetBackup( + _BaseFirestoreAdminRestTransport._BaseGetBackup, FirestoreAdminRestStub + ): def __hash__(self): - return hash("GetBackup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.GetBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1866,7 +2362,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> backup.Backup: r"""Call the get backup method over HTTP. @@ -1877,8 +2373,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.backup.Backup: @@ -1890,38 +2388,59 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/backups/*}", - }, - ] - request, metadata = self._interceptor.pre_get_backup(request, metadata) - pb_request = firestore_admin.GetBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseGetBackup._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + transcoded_request = ( + _BaseFirestoreAdminRestTransport._BaseGetBackup._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreAdminRestTransport._BaseGetBackup._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetBackup", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._GetBackup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1934,22 +2453,58 @@ def __call__( pb_resp = backup.Backup.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = backup.Backup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_backup", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _GetBackupSchedule(FirestoreAdminRestStub): + class _GetBackupSchedule( + _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule, FirestoreAdminRestStub + ): def __hash__(self): - return hash("GetBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.GetBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1957,7 +2512,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Call the get backup schedule method over HTTP. @@ -1968,8 +2523,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.schedule.BackupSchedule: @@ -1982,40 +2539,57 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/backupSchedules/*}", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_http_options() + ) + request, metadata = self._interceptor.pre_get_backup_schedule( request, metadata ) - pb_request = firestore_admin.GetBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetBackupSchedule", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - 
headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._GetBackupSchedule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2028,22 +2602,58 @@ def __call__( pb_resp = schedule.BackupSchedule.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_schedule(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schedule.BackupSchedule.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_backup_schedule", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetBackupSchedule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _GetDatabase(FirestoreAdminRestStub): + class _GetDatabase( + _BaseFirestoreAdminRestTransport._BaseGetDatabase, FirestoreAdminRestStub + ): def __hash__(self): - return hash("GetDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.GetDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2051,7 +2661,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> database.Database: r"""Call the get database method over HTTP. @@ -2062,46 +2672,65 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.database.Database: A Cloud Firestore Database. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*}", - }, - ] - request, metadata = self._interceptor.pre_get_database(request, metadata) - pb_request = firestore_admin.GetDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_get_database(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetDatabase", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._GetDatabase._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2114,22 +2743,58 @@ def __call__( pb_resp = database.Database.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_database(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = database.Database.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_database", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _GetField(FirestoreAdminRestStub): + class _GetField( + _BaseFirestoreAdminRestTransport._BaseGetField, FirestoreAdminRestStub + ): def __hash__(self): - return hash("GetField") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.GetField") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2137,7 +2802,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> field.Field: r"""Call the get field method over HTTP. @@ -2148,8 +2813,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.field.Field: @@ -2161,38 +2828,59 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}", - }, - ] - request, metadata = self._interceptor.pre_get_field(request, metadata) - pb_request = firestore_admin.GetFieldRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseGetField._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_get_field(request, metadata) + transcoded_request = ( + _BaseFirestoreAdminRestTransport._BaseGetField._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreAdminRestTransport._BaseGetField._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetField", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetField", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - 
"{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._GetField._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2205,22 +2893,58 @@ def __call__( pb_resp = field.Field.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_field(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = field.Field.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_field", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetField", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _GetIndex(FirestoreAdminRestStub): + class _GetIndex( + _BaseFirestoreAdminRestTransport._BaseGetIndex, FirestoreAdminRestStub + ): def __hash__(self): - return hash("GetIndex") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.GetIndex") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2228,7 +2952,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Call the get index method over HTTP. @@ -2239,8 +2963,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.index.Index: @@ -2250,38 +2976,59 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}", - }, - ] - request, metadata = self._interceptor.pre_get_index(request, metadata) - pb_request = firestore_admin.GetIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseGetIndex._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_get_index(request, metadata) + transcoded_request = ( + _BaseFirestoreAdminRestTransport._BaseGetIndex._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreAdminRestTransport._BaseGetIndex._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetIndex", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._GetIndex._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2294,22 +3041,59 @@ def __call__( pb_resp = index.Index.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_index(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = index.Index.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_index", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetIndex", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ImportDocuments(FirestoreAdminRestStub): + class _ImportDocuments( + _BaseFirestoreAdminRestTransport._BaseImportDocuments, FirestoreAdminRestStub + ): def __hash__(self): - return hash("ImportDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.ImportDocuments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -2317,7 +3101,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the import documents method over HTTP. @@ -2328,8 +3112,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -2339,47 +3125,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*}:importDocuments", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_http_options() + ) + request, metadata = self._interceptor.pre_import_documents( request, metadata ) - pb_request = firestore_admin.ImportDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.firestore.admin_v1.FirestoreAdminClient.ImportDocuments", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ImportDocuments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._ImportDocuments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2390,22 +3191,58 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_documents(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.import_documents", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ImportDocuments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListBackups(FirestoreAdminRestStub): + class _ListBackups( + _BaseFirestoreAdminRestTransport._BaseListBackups, FirestoreAdminRestStub + ): def __hash__(self): - return hash("ListBackups") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.ListBackups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2413,7 +3250,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListBackupsResponse: r"""Call the list backups method over HTTP. @@ -2424,8 +3261,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.firestore_admin.ListBackupsResponse: @@ -2434,38 +3273,55 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/backups", - }, - ] - request, metadata = self._interceptor.pre_list_backups(request, metadata) - pb_request = firestore_admin.ListBackupsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseListBackups._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseListBackups._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseListBackups._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListBackups", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListBackups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._ListBackups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2478,22 +3334,61 @@ def __call__( pb_resp = firestore_admin.ListBackupsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore_admin.ListBackupsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_backups", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListBackups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListBackupSchedules(FirestoreAdminRestStub): + class _ListBackupSchedules( + _BaseFirestoreAdminRestTransport._BaseListBackupSchedules, + FirestoreAdminRestStub, + ): def __hash__(self): - return 
hash("ListBackupSchedules") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.ListBackupSchedules") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2501,7 +3396,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListBackupSchedulesResponse: r"""Call the list backup schedules method over HTTP. @@ -2512,8 +3407,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.firestore_admin.ListBackupSchedulesResponse: @@ -2522,40 +3419,57 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*}/backupSchedules", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_http_options() + ) + request, metadata = self._interceptor.pre_list_backup_schedules( request, metadata ) - pb_request = firestore_admin.ListBackupSchedulesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListBackupSchedules", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + 
"rpcName": "ListBackupSchedules", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._ListBackupSchedules._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2568,22 +3482,60 @@ def __call__( pb_resp = firestore_admin.ListBackupSchedulesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_schedules(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + firestore_admin.ListBackupSchedulesResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_backup_schedules", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListBackupSchedules", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListDatabases(FirestoreAdminRestStub): + class _ListDatabases( + _BaseFirestoreAdminRestTransport._BaseListDatabases, FirestoreAdminRestStub + ): def __hash__(self): - return hash("ListDatabases") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.ListDatabases") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2591,7 +3543,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListDatabasesResponse: r"""Call the list databases method over HTTP. @@ -2603,46 +3555,65 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.firestore_admin.ListDatabasesResponse: The list of databases for a project. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*}/databases", - }, - ] - request, metadata = self._interceptor.pre_list_databases(request, metadata) - pb_request = firestore_admin.ListDatabasesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseListDatabases._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_list_databases(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseListDatabases._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseListDatabases._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListDatabases", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListDatabases", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._ListDatabases._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2655,22 +3626,60 @@ def __call__( pb_resp = firestore_admin.ListDatabasesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_databases(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore_admin.ListDatabasesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_databases", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListDatabases", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListFields(FirestoreAdminRestStub): + class _ListFields( + _BaseFirestoreAdminRestTransport._BaseListFields, FirestoreAdminRestStub + ): def __hash__(self): - return hash("ListFields") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.ListFields") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2678,7 +3687,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListFieldsResponse: r"""Call the list fields method over HTTP. @@ -2689,8 +3698,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.firestore_admin.ListFieldsResponse: @@ -2699,38 +3710,57 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields", - }, - ] - request, metadata = self._interceptor.pre_list_fields(request, metadata) - pb_request = firestore_admin.ListFieldsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseListFields._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_list_fields(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseListFields._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreAdminRestTransport._BaseListFields._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListFields", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListFields", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the 
request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._ListFields._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2743,22 +3773,60 @@ def __call__( pb_resp = firestore_admin.ListFieldsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_fields(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore_admin.ListFieldsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_fields", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListFields", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListIndexes(FirestoreAdminRestStub): + class _ListIndexes( + _BaseFirestoreAdminRestTransport._BaseListIndexes, FirestoreAdminRestStub + ): def __hash__(self): - return hash("ListIndexes") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.ListIndexes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -2766,7 +3834,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore_admin.ListIndexesResponse: r"""Call the list indexes method over HTTP. @@ -2777,8 +3845,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.firestore_admin.ListIndexesResponse: @@ -2787,38 +3857,55 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes", - }, - ] - request, metadata = self._interceptor.pre_list_indexes(request, metadata) - pb_request = firestore_admin.ListIndexesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseListIndexes._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_list_indexes(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseListIndexes._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseListIndexes._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListIndexes", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListIndexes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreAdminRestTransport._ListIndexes._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2831,22 +3918,61 @@ def __call__( pb_resp = firestore_admin.ListIndexesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_indexes(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore_admin.ListIndexesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_indexes", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListIndexes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _RestoreDatabase(FirestoreAdminRestStub): + class _RestoreDatabase( + _BaseFirestoreAdminRestTransport._BaseRestoreDatabase, FirestoreAdminRestStub + ): def __hash__(self): - return 
hash("RestoreDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.RestoreDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -2854,7 +3980,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the restore database method over HTTP. @@ -2865,8 +3991,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -2876,47 +4004,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/databases:restore", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_http_options() + ) + request, metadata = self._interceptor.pre_restore_database( request, metadata ) - pb_request = firestore_admin.RestoreDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } 
+ _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.RestoreDatabase", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "RestoreDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._RestoreDatabase._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2927,22 +4070,60 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_database(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.restore_database", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "RestoreDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _UpdateBackupSchedule(FirestoreAdminRestStub): + class _UpdateBackupSchedule( + _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule, + FirestoreAdminRestStub, + ): def __hash__(self): - return hash("UpdateBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.UpdateBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -2950,7 +4131,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schedule.BackupSchedule: r"""Call the update backup schedule method over HTTP. @@ -2961,8 +4142,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.schedule.BackupSchedule: @@ -2975,47 +4158,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}", - "body": "backup_schedule", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_http_options() + ) + request, metadata = self._interceptor.pre_update_backup_schedule( request, metadata ) - pb_request = firestore_admin.UpdateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.UpdateBackupSchedule", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "UpdateBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._UpdateBackupSchedule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3028,22 +4226,59 @@ def __call__( pb_resp = schedule.BackupSchedule.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup_schedule(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schedule.BackupSchedule.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.firestore.admin_v1.FirestoreAdminClient.update_backup_schedule", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "UpdateBackupSchedule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _UpdateDatabase(FirestoreAdminRestStub): + class _UpdateDatabase( + _BaseFirestoreAdminRestTransport._BaseUpdateDatabase, FirestoreAdminRestStub + ): def __hash__(self): - return hash("UpdateDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.UpdateDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -3051,7 +4286,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the update database method over HTTP. @@ -3062,8 +4297,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -3073,45 +4310,60 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{database.name=projects/*/databases/*}", - "body": "database", - }, - ] - request, metadata = self._interceptor.pre_update_database(request, metadata) - pb_request = firestore_admin.UpdateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_update_database(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.UpdateDatabase", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "UpdateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._UpdateDatabase._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3122,22 +4374,59 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_database(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.update_database", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "UpdateDatabase", + "metadata": 
http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _UpdateField(FirestoreAdminRestStub): + class _UpdateField( + _BaseFirestoreAdminRestTransport._BaseUpdateField, FirestoreAdminRestStub + ): def __hash__(self): - return hash("UpdateField") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreAdminRestTransport.UpdateField") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -3145,7 +4434,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the update field method over HTTP. @@ -3156,8 +4445,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -3167,45 +4458,60 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}", - "body": "field", - }, - ] - request, metadata = self._interceptor.pre_update_field(request, metadata) - pb_request = firestore_admin.UpdateFieldRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreAdminRestTransport._BaseUpdateField._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_update_field(request, metadata) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseUpdateField._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreAdminRestTransport._BaseUpdateField._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreAdminRestTransport._BaseUpdateField._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.UpdateField", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "UpdateField", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreAdminRestTransport._UpdateField._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3216,7 +4522,29 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_field(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminClient.update_field", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "UpdateField", + "metadata": http_response["headers"], + 
"httpResponse": http_response, + }, + ) return resp @property @@ -3426,14 +4754,42 @@ def update_field( def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(FirestoreAdminRestStub): + class _CancelOperation( + _BaseFirestoreAdminRestTransport._BaseCancelOperation, FirestoreAdminRestStub + ): + def __hash__(self): + return hash("FirestoreAdminRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -3443,41 +4799,68 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*/operations/*}:cancel", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_http_options() + ) request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CancelOperation", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + # Send the request + response = FirestoreAdminRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3491,14 +4874,41 @@ def __call__( def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(FirestoreAdminRestStub): + class _DeleteOperation( + _BaseFirestoreAdminRestTransport._BaseDeleteOperation, FirestoreAdminRestStub + ): + def __hash__(self): + return hash("FirestoreAdminRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> 
None: r"""Call the delete operation method over HTTP. @@ -3508,38 +4918,63 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseDeleteOperation._get_http_options() + ) request, metadata = self._interceptor.pre_delete_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseFirestoreAdminRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteOperation", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + # Send the request + response = FirestoreAdminRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3553,14 +4988,41 @@ def __call__( def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(FirestoreAdminRestStub): + class _GetOperation( + _BaseFirestoreAdminRestTransport._BaseGetOperation, FirestoreAdminRestStub + ): + def __hash__(self): + return hash("FirestoreAdminRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, 
uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -3570,39 +5032,64 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseFirestoreAdminRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseFirestoreAdminRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetOperation", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + # Send the request + response = FirestoreAdminRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3610,23 +5097,72 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = 
json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminAsyncClient.GetOperation", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(FirestoreAdminRestStub): + class _ListOperations( + _BaseFirestoreAdminRestTransport._BaseListOperations, FirestoreAdminRestStub + ): + def __hash__(self): + return hash("FirestoreAdminRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -3636,39 +5172,64 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
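+
+            For illustration, a minimal sketch of reaching this stub through the
+            public client surface (the client and mixin names are assumed from the
+            standard generated API, not defined in this patch):
+
+            .. code-block:: python
+
+                from google.cloud import firestore_admin_v1
+                from google.longrunning import operations_pb2
+
+                client = firestore_admin_v1.FirestoreAdminClient(transport="rest")
+                request = operations_pb2.ListOperationsRequest(
+                    name="projects/my-project/databases/(default)"
+                )
+                # Lists long-running admin operations for the database.
+                for operation in client.list_operations(request=request).operations:
+                    print(operation.name)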
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*}/operations", - }, - ] + http_options = ( + _BaseFirestoreAdminRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseFirestoreAdminRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseFirestoreAdminRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListOperations", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + # Send the request + response = FirestoreAdminRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3676,9 +5237,31 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore.admin_v1.FirestoreAdminAsyncClient.ListOperations", + extra={ + "serviceName": "google.firestore.admin.v1.FirestoreAdmin", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py new file mode 100644 index 0000000000..66b429c065 --- /dev/null +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py @@ -0,0 +1,1437 @@ +# -*- coding: utf-8 
-*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseFirestoreAdminRestTransport(FirestoreAdminTransport): + """Base REST backend transport for FirestoreAdmin. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "firestore.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'firestore.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
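+            For illustration, how ``host`` and ``url_scheme`` combine in the
+            normalization performed by this constructor (values are examples):
+
+            .. code-block:: python
+
+                # "firestore.googleapis.com" + url_scheme="https"
+                #   -> "https://firestore.googleapis.com"
+                # "http://localhost:8080" + url_scheme="https"
+                #   -> "http://localhost:8080"  (an explicit scheme is kept)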
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseBulkDeleteDocuments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/databases/*}:bulkDeleteDocuments", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.BulkDeleteDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*}/backupSchedules", + "body": "backup_schedule", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.CreateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDatabase: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "databaseId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/databases", + "body": "database", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.CreateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateIndex: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes", + "body": "index", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.CreateIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.DeleteBackupRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/databases/*/backupSchedules/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.DeleteBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/databases/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.DeleteDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteIndex: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = firestore_admin.DeleteIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExportDocuments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/databases/*}:exportDocuments", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.ExportDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseGetBackup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*/backupSchedules/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.GetBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.GetDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetField: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.GetFieldRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseGetField._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIndex: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.GetIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseGetIndex._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseImportDocuments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/databases/*}:importDocuments", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.ImportDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListBackups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseListBackups._get_unset_required_fields( + 
query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListBackupSchedules: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/databases/*}/backupSchedules", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.ListBackupSchedulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDatabases: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/databases", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.ListDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseListDatabases._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListFields: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.ListFieldsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + 
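+            # Proto3 JSON serialization omits fields set to their default
+            # values, so re-add any required query params that were dropped.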
query_params.update( + _BaseFirestoreAdminRestTransport._BaseListFields._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListIndexes: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.ListIndexesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseListIndexes._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRestoreDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/databases:restore", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.RestoreDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}", + "body": "backup_schedule", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): 
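+            # Transcoding maps the proto request onto the PATCH URI template
+            # above, e.g. backup_schedule.name fills the
+            # {backup_schedule.name=projects/*/databases/*/backupSchedules/*} path.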
+ pb_request = firestore_admin.UpdateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{database.name=projects/*/databases/*}", + "body": "database", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.UpdateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateField: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}", + "body": "field", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore_admin.UpdateFieldRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseFirestoreAdminRestTransport._BaseUpdateField._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/databases/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/databases/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseFirestoreAdminRestTransport",) diff --git a/google/cloud/firestore_admin_v1/types/firestore_admin.py b/google/cloud/firestore_admin_v1/types/firestore_admin.py index 20a105bd61..28a94bc5aa 100644 --- a/google/cloud/firestore_admin_v1/types/firestore_admin.py +++ 
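All of these operation helpers funnel through `google.api_core.path_template.transcode`, which matches the request against the HTTP rules above and splits it into a URI, an optional body, and leftover query parameters. A rough sketch of what that call does for the GetOperation rule (the printed values are what I'd expect from api-core's transcoder, not captured from a run):

```python
from google.api_core import path_template

http_options = [
    {
        "method": "get",
        "uri": "/v1/{name=projects/*/databases/*/operations/*}",
    },
]

transcoded = path_template.transcode(
    http_options,
    name="projects/my-project/databases/(default)/operations/op-123",
)

print(transcoded["method"])        # get
print(transcoded["uri"])           # /v1/projects/my-project/databases/(default)/operations/op-123
print(transcoded["query_params"])  # {} -- nothing left over for the query string
```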
b/google/cloud/firestore_admin_v1/types/firestore_admin.py @@ -780,12 +780,31 @@ class ListBackupsRequest(proto.Message): ``{location} = '-'`` to list backups from all locations for the given project. This allows listing backups from a single location or from all locations. + filter (str): + An expression that filters the list of returned backups. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [Backup][google.firestore.admin.v1.Backup] are eligible for + filtering: + + - ``database_uid`` (supports ``=`` only) """ parent: str = proto.Field( proto.STRING, number=1, ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) class ListBackupsResponse(proto.Message): diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py index ec1d55e76f..231e24532d 100644 --- a/google/cloud/firestore_v1/services/firestore/async_client.py +++ b/google/cloud/firestore_v1/services/firestore/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -63,6 +64,15 @@ from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport from .client import FirestoreClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class FirestoreAsyncClient: """The Cloud Firestore service. @@ -269,13 +279,35 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.firestore_v1.FirestoreAsyncClient`.", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.firestore.v1.Firestore", + "credentialsType": None, + }, + ) + async def get_document( self, request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> document.Document: r"""Gets a single document. @@ -313,8 +345,10 @@ async def sample_get_document(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
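With the new `filter` field, callers can narrow `ListBackups` to a single database; per the docstring above, only `database_uid` with `=` is eligible today. A hedged usage sketch (project path and UID are placeholders):

```python
from google.cloud import firestore_admin_v1

client = firestore_admin_v1.FirestoreAdminClient()

request = firestore_admin_v1.ListBackupsRequest(
    parent="projects/my-project/locations/-",  # '-' spans all locations
    filter='database_uid = "00000000-0000-0000-0000-000000000000"',
)

response = client.list_backups(request=request)
for backup in response.backups:
    print(backup.name, backup.state)
```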
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.Document: @@ -361,7 +395,7 @@ async def list_documents( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListDocumentsAsyncPager: r"""Lists documents. @@ -400,8 +434,10 @@ async def sample_list_documents(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsAsyncPager: @@ -468,7 +504,7 @@ async def update_document( update_mask: Optional[common.DocumentMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gf_document.Document: r"""Updates or inserts a document. @@ -527,8 +563,10 @@ async def sample_update_document(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.Document: @@ -594,7 +632,7 @@ async def delete_document( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a document. @@ -636,8 +674,10 @@ async def sample_delete_document(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
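The widened `metadata` annotation reflects gRPC's convention: text-valued keys carry `str`, while keys suffixed `-bin` carry `bytes` that the transport base64-encodes on the wire. A small sketch of what a caller can now pass (the key names and values are made up, and `client`/`request` are assumed from the surrounding samples):

```python
metadata = [
    ("x-goog-request-params", "name=projects/my-project/databases/(default)"),
    ("x-debug-trace-bin", b"\x0a\x04\x08\x01\x10\x02"),  # '-bin' suffix => bytes value
]

document = client.get_document(request=request, metadata=metadata)
```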
# - Quick check: If we got a request object, we should *not* have @@ -688,7 +728,7 @@ def batch_get_documents( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[firestore.BatchGetDocumentsResponse]]: r"""Gets multiple documents. @@ -730,8 +770,10 @@ async def sample_batch_get_documents(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: @@ -778,7 +820,7 @@ async def begin_transaction( database: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.BeginTransactionResponse: r"""Starts a new transaction. @@ -822,8 +864,10 @@ async def sample_begin_transaction(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.BeginTransactionResponse: @@ -885,7 +929,7 @@ async def commit( writes: Optional[MutableSequence[gf_write.Write]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.CommitResponse: r"""Commits a transaction, while optionally updating documents. @@ -938,8 +982,10 @@ async def sample_commit(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.CommitResponse: @@ -1001,7 +1047,7 @@ async def rollback( transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Rolls back a transaction. 
@@ -1050,8 +1096,10 @@ async def sample_rollback(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1102,7 +1150,7 @@ def run_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[firestore.RunQueryResponse]]: r"""Runs a query. @@ -1141,8 +1189,10 @@ async def sample_run_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.firestore_v1.types.RunQueryResponse]: @@ -1188,7 +1238,7 @@ def run_aggregation_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[firestore.RunAggregationQueryResponse]]: r"""Runs an aggregation query. @@ -1241,8 +1291,10 @@ async def sample_run_aggregation_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: @@ -1288,7 +1340,7 @@ async def partition_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.PartitionQueryAsyncPager: r"""Partitions a query by returning partition cursors that can be used to run the query in parallel. The @@ -1330,8 +1382,10 @@ async def sample_partition_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryAsyncPager: @@ -1391,7 +1445,7 @@ def write( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[firestore.WriteResponse]]: r"""Streams batches of document updates and deletes, in order. This method is only available via gRPC or @@ -1453,8 +1507,10 @@ def request_generator(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.firestore_v1.types.WriteResponse]: @@ -1491,7 +1547,7 @@ def listen( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[firestore.ListenResponse]]: r"""Listens to changes. This method is only available via gRPC or WebChannel (not REST). @@ -1544,8 +1600,10 @@ def request_generator(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.firestore_v1.types.ListenResponse]: @@ -1583,7 +1641,7 @@ async def list_collection_ids( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListCollectionIdsAsyncPager: r"""Lists all the collection IDs underneath a document. @@ -1630,8 +1688,10 @@ async def sample_list_collection_ids(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
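`partition_query` (documented above) only makes sense for queries that can be split, i.e. collection-group queries; each returned cursor bounds one shard that a separate worker can run. A rough async sketch under those assumptions (all identifiers are placeholders):

```python
from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient
from google.cloud.firestore_v1.types import firestore, query

async def fan_out(client: FirestoreAsyncClient) -> None:
    structured_query = query.StructuredQuery(
        from_=[
            query.StructuredQuery.CollectionSelector(
                collection_id="cities", all_descendants=True
            )
        ],
    )
    request = firestore.PartitionQueryRequest(
        parent="projects/my-project/databases/(default)/documents",
        structured_query=structured_query,
        partition_count=3,  # ask for at most 3 split points
    )
    pager = await client.partition_query(request=request)
    async for cursor in pager:
        print(cursor)  # feed each cursor into a parallel run_query shard
```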
Returns: google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsAsyncPager: @@ -1705,7 +1765,7 @@ async def batch_write( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.BatchWriteResponse: r"""Applies a batch of write operations. @@ -1752,8 +1812,10 @@ async def sample_batch_write(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.BatchWriteResponse: @@ -1799,7 +1861,7 @@ async def create_document( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> document.Document: r"""Creates a new document. @@ -1837,8 +1899,10 @@ async def sample_create_document(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.Document: @@ -1890,7 +1954,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1901,8 +1965,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1915,11 +1981,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. 
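The operations mixins in the async client stop wrapping the transport method on every call and instead look the pre-wrapped callable up from the transport, matching how the primary RPCs are already dispatched. The shape of the change, side by side:

```python
# Before: a fresh wrapper (retry/timeout/client-info) built per invocation.
rpc = gapic_v1.method_async.wrap_method(
    self._client._transport.list_operations,
    default_timeout=None,
    client_info=DEFAULT_CLIENT_INFO,
)

# After: reuse the wrapper the transport built once at construction time.
rpc = self.transport._wrapped_methods[self._client._transport.list_operations]
```

Besides avoiding a per-call wrap, this lets the operations RPCs pick up whatever defaults the transport registered, rather than the hard-coded `default_timeout=None`.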
@@ -1947,7 +2009,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1958,8 +2020,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1972,11 +2036,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2004,7 +2064,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2020,8 +2080,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2033,11 +2095,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2062,7 +2120,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2077,8 +2135,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2090,11 +2150,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py index 888c88e809..2054b14388 100644 --- a/google/cloud/firestore_v1/services/firestore/client.py +++ b/google/cloud/firestore_v1/services/firestore/client.py @@ -14,6 +14,7 @@ # limitations under the License. # from collections import OrderedDict +import logging as std_logging import os import re from typing import ( @@ -50,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.cloud.firestore_v1.services.firestore import pagers from google.cloud.firestore_v1.types import aggregation_result from google.cloud.firestore_v1.types import common @@ -458,36 +468,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FirestoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -497,13 +477,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FirestoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): @@ -609,6 +585,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -671,13 +651,36 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.firestore_v1.FirestoreClient`.", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.firestore.v1.Firestore", + "credentialsType": None, + }, + ) + def get_document( self, request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> document.Document: r"""Gets a single document. @@ -715,8 +718,10 @@ def sample_get_document(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.Document: @@ -761,7 +766,7 @@ def list_documents( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListDocumentsPager: r"""Lists documents. @@ -800,8 +805,10 @@ def sample_list_documents(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsPager: @@ -866,7 +873,7 @@ def update_document( update_mask: Optional[common.DocumentMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gf_document.Document: r"""Updates or inserts a document. 
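The new "Created client" entry above is emitted only at DEBUG, and only when the installed `google-api-core` ships `client_logging`. To surface it in an application, something like the following should work with a standard logging setup (the logger name matches the module that defines `_LOGGER`):

```python
import logging

logging.basicConfig(level=logging.WARNING)
# Enable just the Firestore GAPIC client module; the structured fields
# (serviceName, credentialsType, ...) arrive via the log record's `extra` dict.
logging.getLogger("google.cloud.firestore_v1.services.firestore.client").setLevel(
    logging.DEBUG
)

from google.cloud.firestore_v1.services.firestore import FirestoreClient

client = FirestoreClient()  # logs: Created client `google.firestore_v1.FirestoreClient`.
```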
@@ -925,8 +932,10 @@ def sample_update_document(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.Document: @@ -989,7 +998,7 @@ def delete_document( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a document. @@ -1031,8 +1040,10 @@ def sample_delete_document(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1080,7 +1091,7 @@ def batch_get_documents( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[firestore.BatchGetDocumentsResponse]: r"""Gets multiple documents. @@ -1122,8 +1133,10 @@ def sample_batch_get_documents(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: @@ -1168,7 +1181,7 @@ def begin_transaction( database: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.BeginTransactionResponse: r"""Starts a new transaction. @@ -1212,8 +1225,10 @@ def sample_begin_transaction(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.BeginTransactionResponse: @@ -1272,7 +1287,7 @@ def commit( writes: Optional[MutableSequence[gf_write.Write]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.CommitResponse: r"""Commits a transaction, while optionally updating documents. @@ -1325,8 +1340,10 @@ def sample_commit(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.CommitResponse: @@ -1387,7 +1404,7 @@ def rollback( transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Rolls back a transaction. @@ -1436,8 +1453,10 @@ def sample_rollback(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1487,7 +1506,7 @@ def run_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[firestore.RunQueryResponse]: r"""Runs a query. @@ -1526,8 +1545,10 @@ def sample_run_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.firestore_v1.types.RunQueryResponse]: @@ -1571,7 +1592,7 @@ def run_aggregation_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[firestore.RunAggregationQueryResponse]: r"""Runs an aggregation query. 
@@ -1624,8 +1645,10 @@ def sample_run_aggregation_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: @@ -1669,7 +1692,7 @@ def partition_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.PartitionQueryPager: r"""Partitions a query by returning partition cursors that can be used to run the query in parallel. The @@ -1711,8 +1734,10 @@ def sample_partition_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryPager: @@ -1770,7 +1795,7 @@ def write( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[firestore.WriteResponse]: r"""Streams batches of document updates and deletes, in order. This method is only available via gRPC or @@ -1832,8 +1857,10 @@ def request_generator(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.firestore_v1.types.WriteResponse]: @@ -1870,7 +1897,7 @@ def listen( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[firestore.ListenResponse]: r"""Listens to changes. This method is only available via gRPC or WebChannel (not REST). @@ -1923,8 +1950,10 @@ def request_generator(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.firestore_v1.types.ListenResponse]: @@ -1962,7 +1991,7 @@ def list_collection_ids( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListCollectionIdsPager: r"""Lists all the collection IDs underneath a document. @@ -2009,8 +2038,10 @@ def sample_list_collection_ids(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsPager: @@ -2081,7 +2112,7 @@ def batch_write( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.BatchWriteResponse: r"""Applies a batch of write operations. @@ -2128,8 +2159,10 @@ def sample_batch_write(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.BatchWriteResponse: @@ -2173,7 +2206,7 @@ def create_document( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> document.Document: r"""Creates a new document. @@ -2211,8 +2244,10 @@ def sample_create_document(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.firestore_v1.types.Document: @@ -2275,7 +2310,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. 
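`listen` (like `write`, above) is a bidirectional stream, so the client takes an iterator of requests; the first message names the database and adds a target. A rough synchronous sketch of the `request_generator()` pattern the docstring refers to (paths and IDs are placeholders):

```python
from google.cloud.firestore_v1.services.firestore import FirestoreClient
from google.cloud.firestore_v1.types import firestore

client = FirestoreClient()
database = "projects/my-project/databases/(default)"

def request_generator():
    # First message: open the stream and watch a single document.
    yield firestore.ListenRequest(
        database=database,
        add_target=firestore.Target(
            documents=firestore.Target.DocumentsTarget(
                documents=[f"{database}/documents/cities/SF"]
            )
        ),
    )

for response in client.listen(requests=request_generator()):
    print(response)  # target_change / document_change / ... events
```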
@@ -2286,8 +2321,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2300,11 +2337,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -2332,7 +2365,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2343,8 +2376,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2357,11 +2392,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2389,7 +2420,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2405,8 +2436,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2418,11 +2451,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2447,7 +2476,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2462,8 +2491,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2475,11 +2506,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/firestore_v1/services/firestore/pagers.py b/google/cloud/firestore_v1/services/firestore/pagers.py index 71ebf18fb9..4e158080da 100644 --- a/google/cloud/firestore_v1/services/firestore/pagers.py +++ b/google/cloud/firestore_v1/services/firestore/pagers.py @@ -68,7 +68,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -82,8 +82,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore.ListDocumentsRequest(request) @@ -142,7 +144,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -156,8 +158,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore.ListDocumentsRequest(request) @@ -220,7 +224,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -234,8 +238,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore.PartitionQueryRequest(request) @@ -294,7 +300,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -308,8 +314,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore.PartitionQueryRequest(request) @@ -372,7 +380,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -386,8 +394,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore.ListCollectionIdsRequest(request) @@ -446,7 +456,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -460,8 +470,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = firestore.ListCollectionIdsRequest(request) diff --git a/google/cloud/firestore_v1/services/firestore/transports/README.rst b/google/cloud/firestore_v1/services/firestore/transports/README.rst new file mode 100644 index 0000000000..1823b6773c --- /dev/null +++ b/google/cloud/firestore_v1/services/firestore/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`FirestoreTransport` is the ABC for all transports. +- public child `FirestoreGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `FirestoreGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseFirestoreRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `FirestoreRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py index d22e6ce3ba..f86482ce3d 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/base.py @@ -390,6 +390,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py index 7d334a5394..02f2ab682c 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
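As a usage sketch of the transport inheritance structure documented in the new README.rst above: the generated client accepts a transport by name, so callers can opt into the REST implementation (standard GAPIC constructor behavior is assumed here):

from google.cloud.firestore_v1.services.firestore import FirestoreClient

grpc_client = FirestoreClient()                  # default sync transport: FirestoreGrpcTransport
rest_client = FirestoreClient(transport="rest")  # sync REST transport from rest.py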
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,8 +24,11 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document @@ -32,6 +38,81 @@ from google.protobuf import empty_pb2 # type: ignore from .base import FirestoreTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class FirestoreGrpcTransport(FirestoreTransport): """gRPC backend transport for Firestore. @@ -193,7 +274,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -267,7 +353,7 @@ def get_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_document" not in self._stubs: - self._stubs["get_document"] = self.grpc_channel.unary_unary( + self._stubs["get_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/GetDocument", request_serializer=firestore.GetDocumentRequest.serialize, response_deserializer=document.Document.deserialize, @@ -293,7 +379,7 @@ def list_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_documents" not in self._stubs: - self._stubs["list_documents"] = self.grpc_channel.unary_unary( + self._stubs["list_documents"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/ListDocuments", request_serializer=firestore.ListDocumentsRequest.serialize, response_deserializer=firestore.ListDocumentsResponse.deserialize, @@ -319,7 +405,7 @@ def update_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_document" not in self._stubs: - self._stubs["update_document"] = self.grpc_channel.unary_unary( + self._stubs["update_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/UpdateDocument", request_serializer=firestore.UpdateDocumentRequest.serialize, response_deserializer=gf_document.Document.deserialize, @@ -345,7 +431,7 @@ def delete_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_document" not in self._stubs: - self._stubs["delete_document"] = self.grpc_channel.unary_unary( + self._stubs["delete_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/DeleteDocument", request_serializer=firestore.DeleteDocumentRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -376,7 +462,7 @@ def batch_get_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_get_documents" not in self._stubs: - self._stubs["batch_get_documents"] = self.grpc_channel.unary_stream( + self._stubs["batch_get_documents"] = self._logged_channel.unary_stream( "/google.firestore.v1.Firestore/BatchGetDocuments", request_serializer=firestore.BatchGetDocumentsRequest.serialize, response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, @@ -404,7 +490,7 @@ def begin_transaction( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + self._stubs["begin_transaction"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/BeginTransaction", request_serializer=firestore.BeginTransactionRequest.serialize, response_deserializer=firestore.BeginTransactionResponse.deserialize, @@ -429,7 +515,7 @@ def commit(self) -> Callable[[firestore.CommitRequest], firestore.CommitResponse # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
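The `_LoggingClientInterceptor` and `_logged_channel` introduced above only emit records when this module's logger is enabled for DEBUG and `google.api_core.client_logging` is importable (`CLIENT_LOGGING_SUPPORTED`). A minimal sketch for surfacing those records:

import logging

logging.basicConfig()
logging.getLogger(
    "google.cloud.firestore_v1.services.firestore.transports.grpc"
).setLevel(logging.DEBUG)
# RPCs through the logged channel now produce "Sending request for ..." and
# "Received response for ..." records carrying payload and metadata extras.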
if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( + self._stubs["commit"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/Commit", request_serializer=firestore.CommitRequest.serialize, response_deserializer=firestore.CommitResponse.deserialize, @@ -453,7 +539,7 @@ def rollback(self) -> Callable[[firestore.RollbackRequest], empty_pb2.Empty]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( + self._stubs["rollback"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/Rollback", request_serializer=firestore.RollbackRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -479,7 +565,7 @@ def run_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_stream( + self._stubs["run_query"] = self._logged_channel.unary_stream( "/google.firestore.v1.Firestore/RunQuery", request_serializer=firestore.RunQueryRequest.serialize, response_deserializer=firestore.RunQueryResponse.deserialize, @@ -521,7 +607,7 @@ def run_aggregation_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self.grpc_channel.unary_stream( + self._stubs["run_aggregation_query"] = self._logged_channel.unary_stream( "/google.firestore.v1.Firestore/RunAggregationQuery", request_serializer=firestore.RunAggregationQueryRequest.serialize, response_deserializer=firestore.RunAggregationQueryResponse.deserialize, @@ -551,7 +637,7 @@ def partition_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "partition_query" not in self._stubs: - self._stubs["partition_query"] = self.grpc_channel.unary_unary( + self._stubs["partition_query"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/PartitionQuery", request_serializer=firestore.PartitionQueryRequest.serialize, response_deserializer=firestore.PartitionQueryResponse.deserialize, @@ -577,7 +663,7 @@ def write(self) -> Callable[[firestore.WriteRequest], firestore.WriteResponse]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write" not in self._stubs: - self._stubs["write"] = self.grpc_channel.stream_stream( + self._stubs["write"] = self._logged_channel.stream_stream( "/google.firestore.v1.Firestore/Write", request_serializer=firestore.WriteRequest.serialize, response_deserializer=firestore.WriteResponse.deserialize, @@ -602,7 +688,7 @@ def listen(self) -> Callable[[firestore.ListenRequest], firestore.ListenResponse # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "listen" not in self._stubs: - self._stubs["listen"] = self.grpc_channel.stream_stream( + self._stubs["listen"] = self._logged_channel.stream_stream( "/google.firestore.v1.Firestore/Listen", request_serializer=firestore.ListenRequest.serialize, response_deserializer=firestore.ListenResponse.deserialize, @@ -630,7 +716,7 @@ def list_collection_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_collection_ids" not in self._stubs: - self._stubs["list_collection_ids"] = self.grpc_channel.unary_unary( + self._stubs["list_collection_ids"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/ListCollectionIds", request_serializer=firestore.ListCollectionIdsRequest.serialize, response_deserializer=firestore.ListCollectionIdsResponse.deserialize, @@ -666,7 +752,7 @@ def batch_write( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_write" not in self._stubs: - self._stubs["batch_write"] = self.grpc_channel.unary_unary( + self._stubs["batch_write"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/BatchWrite", request_serializer=firestore.BatchWriteRequest.serialize, response_deserializer=firestore.BatchWriteResponse.deserialize, @@ -692,7 +778,7 @@ def create_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_document" not in self._stubs: - self._stubs["create_document"] = self.grpc_channel.unary_unary( + self._stubs["create_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/CreateDocument", request_serializer=firestore.CreateDocumentRequest.serialize, response_deserializer=document.Document.deserialize, @@ -700,7 +786,7 @@ def create_document( return self._stubs["create_document"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -712,7 +798,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -729,7 +815,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -746,7 +832,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -765,7 +851,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py index c8eaab433a..3ce6c9b317 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.firestore_v1.types import document @@ -35,6 +42,82 @@ from .base import FirestoreTransport, DEFAULT_CLIENT_INFO from .grpc import FirestoreGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = 
f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class FirestoreGrpcAsyncIOTransport(FirestoreTransport): """gRPC AsyncIO backend transport for Firestore. @@ -239,7 +322,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -271,7 +360,7 @@ def get_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_document" not in self._stubs: - self._stubs["get_document"] = self.grpc_channel.unary_unary( + self._stubs["get_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/GetDocument", request_serializer=firestore.GetDocumentRequest.serialize, response_deserializer=document.Document.deserialize, @@ -299,7 +388,7 @@ def list_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_documents" not in self._stubs: - self._stubs["list_documents"] = self.grpc_channel.unary_unary( + self._stubs["list_documents"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/ListDocuments", request_serializer=firestore.ListDocumentsRequest.serialize, response_deserializer=firestore.ListDocumentsResponse.deserialize, @@ -325,7 +414,7 @@ def update_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_document" not in self._stubs: - self._stubs["update_document"] = self.grpc_channel.unary_unary( + self._stubs["update_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/UpdateDocument", request_serializer=firestore.UpdateDocumentRequest.serialize, response_deserializer=gf_document.Document.deserialize, @@ -351,7 +440,7 @@ def delete_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_document" not in self._stubs: - self._stubs["delete_document"] = self.grpc_channel.unary_unary( + self._stubs["delete_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/DeleteDocument", request_serializer=firestore.DeleteDocumentRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -383,7 +472,7 @@ def batch_get_documents( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_get_documents" not in self._stubs: - self._stubs["batch_get_documents"] = self.grpc_channel.unary_stream( + self._stubs["batch_get_documents"] = self._logged_channel.unary_stream( "/google.firestore.v1.Firestore/BatchGetDocuments", request_serializer=firestore.BatchGetDocumentsRequest.serialize, response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, @@ -412,7 +501,7 @@ def begin_transaction( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + self._stubs["begin_transaction"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/BeginTransaction", request_serializer=firestore.BeginTransactionRequest.serialize, response_deserializer=firestore.BeginTransactionResponse.deserialize, @@ -439,7 +528,7 @@ def commit( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( + self._stubs["commit"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/Commit", request_serializer=firestore.CommitRequest.serialize, response_deserializer=firestore.CommitResponse.deserialize, @@ -465,7 +554,7 @@ def rollback( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( + self._stubs["rollback"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/Rollback", request_serializer=firestore.RollbackRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -491,7 +580,7 @@ def run_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_stream( + self._stubs["run_query"] = self._logged_channel.unary_stream( "/google.firestore.v1.Firestore/RunQuery", request_serializer=firestore.RunQueryRequest.serialize, response_deserializer=firestore.RunQueryResponse.deserialize, @@ -534,7 +623,7 @@ def run_aggregation_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self.grpc_channel.unary_stream( + self._stubs["run_aggregation_query"] = self._logged_channel.unary_stream( "/google.firestore.v1.Firestore/RunAggregationQuery", request_serializer=firestore.RunAggregationQueryRequest.serialize, response_deserializer=firestore.RunAggregationQueryResponse.deserialize, @@ -566,7 +655,7 @@ def partition_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "partition_query" not in self._stubs: - self._stubs["partition_query"] = self.grpc_channel.unary_unary( + self._stubs["partition_query"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/PartitionQuery", request_serializer=firestore.PartitionQueryRequest.serialize, response_deserializer=firestore.PartitionQueryResponse.deserialize, @@ -594,7 +683,7 @@ def write( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "write" not in self._stubs: - self._stubs["write"] = self.grpc_channel.stream_stream( + self._stubs["write"] = self._logged_channel.stream_stream( "/google.firestore.v1.Firestore/Write", request_serializer=firestore.WriteRequest.serialize, response_deserializer=firestore.WriteResponse.deserialize, @@ -621,7 +710,7 @@ def listen( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "listen" not in self._stubs: - self._stubs["listen"] = self.grpc_channel.stream_stream( + self._stubs["listen"] = self._logged_channel.stream_stream( "/google.firestore.v1.Firestore/Listen", request_serializer=firestore.ListenRequest.serialize, response_deserializer=firestore.ListenResponse.deserialize, @@ -650,7 +739,7 @@ def list_collection_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_collection_ids" not in self._stubs: - self._stubs["list_collection_ids"] = self.grpc_channel.unary_unary( + self._stubs["list_collection_ids"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/ListCollectionIds", request_serializer=firestore.ListCollectionIdsRequest.serialize, response_deserializer=firestore.ListCollectionIdsResponse.deserialize, @@ -688,7 +777,7 @@ def batch_write( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_write" not in self._stubs: - self._stubs["batch_write"] = self.grpc_channel.unary_unary( + self._stubs["batch_write"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/BatchWrite", request_serializer=firestore.BatchWriteRequest.serialize, response_deserializer=firestore.BatchWriteResponse.deserialize, @@ -714,7 +803,7 @@ def create_document( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_document" not in self._stubs: - self._stubs["create_document"] = self.grpc_channel.unary_unary( + self._stubs["create_document"] = self._logged_channel.unary_unary( "/google.firestore.v1.Firestore/CreateDocument", request_serializer=firestore.CreateDocumentRequest.serialize, response_deserializer=document.Document.deserialize, @@ -724,7 +813,7 @@ def create_document( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.get_document: gapic_v1.method_async.wrap_method( + self.get_document: self._wrap_method( self.get_document, default_retry=retries.AsyncRetry( initial=0.1, @@ -741,7 +830,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_documents: gapic_v1.method_async.wrap_method( + self.list_documents: self._wrap_method( self.list_documents, default_retry=retries.AsyncRetry( initial=0.1, @@ -758,7 +847,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.update_document: gapic_v1.method_async.wrap_method( + self.update_document: self._wrap_method( self.update_document, default_retry=retries.AsyncRetry( initial=0.1, @@ -773,7 +862,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_document: gapic_v1.method_async.wrap_method( + self.delete_document: self._wrap_method( self.delete_document, default_retry=retries.AsyncRetry( initial=0.1, @@ -790,7 +879,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.batch_get_documents: gapic_v1.method_async.wrap_method( + self.batch_get_documents: self._wrap_method( self.batch_get_documents, default_retry=retries.AsyncRetry( initial=0.1, @@ -807,7 +896,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), - self.begin_transaction: gapic_v1.method_async.wrap_method( + self.begin_transaction: self._wrap_method( self.begin_transaction, default_retry=retries.AsyncRetry( initial=0.1, @@ -824,7 +913,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.commit: gapic_v1.method_async.wrap_method( + self.commit: self._wrap_method( self.commit, default_retry=retries.AsyncRetry( initial=0.1, @@ -839,7 +928,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.rollback: gapic_v1.method_async.wrap_method( + self.rollback: self._wrap_method( self.rollback, default_retry=retries.AsyncRetry( initial=0.1, @@ -856,7 +945,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.run_query: gapic_v1.method_async.wrap_method( + self.run_query: self._wrap_method( self.run_query, default_retry=retries.AsyncRetry( initial=0.1, @@ -873,7 +962,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), - self.run_aggregation_query: gapic_v1.method_async.wrap_method( + self.run_aggregation_query: self._wrap_method( self.run_aggregation_query, default_retry=retries.AsyncRetry( initial=0.1, @@ -890,7 +979,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), - self.partition_query: gapic_v1.method_async.wrap_method( + self.partition_query: self._wrap_method( self.partition_query, default_retry=retries.AsyncRetry( 
initial=0.1, @@ -907,12 +996,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), - self.write: gapic_v1.method_async.wrap_method( + self.write: self._wrap_method( self.write, default_timeout=86400.0, client_info=client_info, ), - self.listen: gapic_v1.method_async.wrap_method( + self.listen: self._wrap_method( self.listen, default_retry=retries.AsyncRetry( initial=0.1, @@ -929,7 +1018,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=86400.0, client_info=client_info, ), - self.list_collection_ids: gapic_v1.method_async.wrap_method( + self.list_collection_ids: self._wrap_method( self.list_collection_ids, default_retry=retries.AsyncRetry( initial=0.1, @@ -946,7 +1035,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.batch_write: gapic_v1.method_async.wrap_method( + self.batch_write: self._wrap_method( self.batch_write, default_retry=retries.AsyncRetry( initial=0.1, @@ -962,7 +1051,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_document: gapic_v1.method_async.wrap_method( + self.create_document: self._wrap_method( self.create_document, default_retry=retries.AsyncRetry( initial=0.1, @@ -977,10 +1066,39 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" @property def delete_operation( @@ -992,7 +1110,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -1009,7 +1127,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1026,7 +1144,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
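`_wrap_method` above forwards the `kind` keyword only when the installed google-api-core `wrap_method` accepts it, which `__init__` detects once via `inspect.signature`. The idiom in isolation, as a self-contained sketch:

import inspect

def forward_if_supported(func, *args, kind=None, **kwargs):
    # Pass `kind` through only if the callable declares that parameter.
    if "kind" in inspect.signature(func).parameters:
        kwargs["kind"] = kind
    return func(*args, **kwargs)

def old_wrap(func):  # models an older API without `kind`
    return func

def new_wrap(func, kind=None):  # models a newer API accepting `kind`
    return (func, kind)

assert forward_if_supported(old_wrap, len) is len
assert forward_if_supported(new_wrap, len, kind="grpc_asyncio") == (len, "grpc_asyncio")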
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1045,7 +1163,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest.py b/google/cloud/firestore_v1/services/firestore/transports/rest.py index c85f4f2ed2..31546b37ea 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest.py @@ -13,32 +13,25 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format from google.cloud.location import locations_pb2 # type: ignore + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document @@ -46,13 +39,28 @@ from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from .base import FirestoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseFirestoreRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -184,8 +192,10 @@ def post_update_document(self, response): def 
pre_batch_get_documents( self, request: firestore.BatchGetDocumentsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.BatchGetDocumentsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.BatchGetDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for batch_get_documents Override in a subclass to manipulate the request or metadata @@ -205,8 +215,10 @@ def post_batch_get_documents( return response def pre_batch_write( - self, request: firestore.BatchWriteRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[firestore.BatchWriteRequest, Sequence[Tuple[str, str]]]: + self, + request: firestore.BatchWriteRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.BatchWriteRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_write Override in a subclass to manipulate the request or metadata @@ -228,8 +240,10 @@ def post_batch_write( def pre_begin_transaction( self, request: firestore.BeginTransactionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.BeginTransactionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for begin_transaction Override in a subclass to manipulate the request or metadata @@ -249,8 +263,10 @@ def post_begin_transaction( return response def pre_commit( - self, request: firestore.CommitRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[firestore.CommitRequest, Sequence[Tuple[str, str]]]: + self, + request: firestore.CommitRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for commit Override in a subclass to manipulate the request or metadata @@ -272,8 +288,10 @@ def post_commit( def pre_create_document( self, request: firestore.CreateDocumentRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.CreateDocumentRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.CreateDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_document Override in a subclass to manipulate the request or metadata @@ -293,8 +311,10 @@ def post_create_document(self, response: document.Document) -> document.Document def pre_delete_document( self, request: firestore.DeleteDocumentRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.DeleteDocumentRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.DeleteDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_document Override in a subclass to manipulate the request or metadata @@ -303,8 +323,10 @@ def pre_delete_document( return request, metadata def pre_get_document( - self, request: firestore.GetDocumentRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[firestore.GetDocumentRequest, Sequence[Tuple[str, str]]]: + self, + request: firestore.GetDocumentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.GetDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_document Override in a subclass to manipulate the request or metadata @@ -324,8 +346,10 @@ def 
post_get_document(self, response: document.Document) -> document.Document: def pre_list_collection_ids( self, request: firestore.ListCollectionIdsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.ListCollectionIdsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.ListCollectionIdsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_collection_ids Override in a subclass to manipulate the request or metadata @@ -347,8 +371,8 @@ def post_list_collection_ids( def pre_list_documents( self, request: firestore.ListDocumentsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.ListDocumentsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.ListDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_documents Override in a subclass to manipulate the request or metadata @@ -370,8 +394,10 @@ def post_list_documents( def pre_partition_query( self, request: firestore.PartitionQueryRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.PartitionQueryRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.PartitionQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for partition_query Override in a subclass to manipulate the request or metadata @@ -391,8 +417,10 @@ def post_partition_query( return response def pre_rollback( - self, request: firestore.RollbackRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[firestore.RollbackRequest, Sequence[Tuple[str, str]]]: + self, + request: firestore.RollbackRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for rollback Override in a subclass to manipulate the request or metadata @@ -403,8 +431,10 @@ def pre_rollback( def pre_run_aggregation_query( self, request: firestore.RunAggregationQueryRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.RunAggregationQueryRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.RunAggregationQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for run_aggregation_query Override in a subclass to manipulate the request or metadata @@ -424,8 +454,10 @@ def post_run_aggregation_query( return response def pre_run_query( - self, request: firestore.RunQueryRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[firestore.RunQueryRequest, Sequence[Tuple[str, str]]]: + self, + request: firestore.RunQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.RunQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for run_query Override in a subclass to manipulate the request or metadata @@ -447,8 +479,10 @@ def post_run_query( def pre_update_document( self, request: firestore.UpdateDocumentRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[firestore.UpdateDocumentRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.UpdateDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_document Override in a subclass to manipulate the request or metadata @@ -470,8 +504,10 @@ def post_update_document( def 
pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -491,8 +527,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -512,8 +550,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -535,8 +575,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -563,8 +605,8 @@ class FirestoreRestStub: _interceptor: FirestoreRestInterceptor -class FirestoreRestTransport(FirestoreTransport): - """REST backend transport for Firestore. +class FirestoreRestTransport(_BaseFirestoreRestTransport): + """REST backend synchronous transport for Firestore. The Cloud Firestore service. @@ -581,7 +623,6 @@ class FirestoreRestTransport(FirestoreTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -635,21 +676,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
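Every pre_* hook above now receives and returns `(request, metadata)`, so a subclass can rewrite either before the REST call goes out. A sketch assuming the transport is constructed explicitly; the header key is made up:

from google.cloud.firestore_v1.services.firestore import FirestoreClient
from google.cloud.firestore_v1.services.firestore.transports.rest import (
    FirestoreRestInterceptor,
    FirestoreRestTransport,
)

class TaggingInterceptor(FirestoreRestInterceptor):
    def pre_commit(self, request, metadata):
        # Append a custom header pair; "x-example-tag" is hypothetical.
        return request, list(metadata) + [("x-example-tag", "demo")]

client = FirestoreClient(
    transport=FirestoreRestTransport(interceptor=TaggingInterceptor())
)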
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -660,19 +692,35 @@ def __init__( self._interceptor = interceptor or FirestoreRestInterceptor() self._prep_wrapped_messages(client_info) - class _BatchGetDocuments(FirestoreRestStub): + class _BatchGetDocuments( + _BaseFirestoreRestTransport._BaseBatchGetDocuments, FirestoreRestStub + ): def __hash__(self): - return hash("BatchGetDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.BatchGetDocuments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response def __call__( self, @@ -680,7 +728,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> rest_streaming.ResponseIterator: r"""Call the batch get documents method over HTTP. @@ -691,8 +739,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.firestore.BatchGetDocumentsResponse: @@ -701,47 +751,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:batchGet", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_http_options() + ) + request, metadata = self._interceptor.pre_batch_get_documents( request, metadata ) - pb_request = firestore.BatchGetDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.BatchGetDocuments", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "BatchGetDocuments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._BatchGetDocuments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -753,22 +818,36 @@ def __call__( resp = rest_streaming.ResponseIterator( response, firestore.BatchGetDocumentsResponse ) + resp = self._interceptor.post_batch_get_documents(resp) return resp - class _BatchWrite(FirestoreRestStub): + class _BatchWrite(_BaseFirestoreRestTransport._BaseBatchWrite, FirestoreRestStub): def __hash__(self): - return hash("BatchWrite") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.BatchWrite") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + 
headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -776,7 +855,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.BatchWriteResponse: r"""Call the batch write method over HTTP. @@ -787,8 +866,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.firestore.BatchWriteResponse: @@ -797,45 +878,64 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:batchWrite", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_batch_write(request, metadata) - pb_request = firestore.BatchWriteRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreRestTransport._BaseBatchWrite._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_batch_write(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseBatchWrite._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseBatchWrite._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseBatchWrite._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.BatchWrite", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "BatchWrite", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._BatchWrite._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -848,22 +948,59 @@ def __call__( pb_resp = firestore.BatchWriteResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_write(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore.BatchWriteResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.batch_write", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "BatchWrite", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _BeginTransaction(FirestoreRestStub): + class _BeginTransaction( + _BaseFirestoreRestTransport._BaseBeginTransaction, FirestoreRestStub + ): def __hash__(self): - return hash("BeginTransaction") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.BeginTransaction") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -871,7 +1008,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.BeginTransactionResponse: r"""Call the begin transaction method over HTTP. @@ -882,8 +1019,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.firestore.BeginTransactionResponse: @@ -892,47 +1031,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:beginTransaction", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseBeginTransaction._get_http_options() + ) + request, metadata = self._interceptor.pre_begin_transaction( request, metadata ) - pb_request = firestore.BeginTransactionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreRestTransport._BaseBeginTransaction._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseBeginTransaction._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreRestTransport._BaseBeginTransaction._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.BeginTransaction", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "BeginTransaction", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._BeginTransaction._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -945,22 +1099,59 @@ def __call__( pb_resp = firestore.BeginTransactionResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_begin_transaction(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore.BeginTransactionResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.begin_transaction", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "BeginTransaction", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class 
_Commit(FirestoreRestStub): + class _Commit(_BaseFirestoreRestTransport._BaseCommit, FirestoreRestStub): def __hash__(self): - return hash("Commit") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.Commit") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -968,7 +1159,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.CommitResponse: r"""Call the commit method over HTTP. @@ -979,8 +1170,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
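Every stub in this file now funnels its HTTP call through a _get_response staticmethod of the shape above. A condensed sketch of the shared pattern, with a plain requests.Session standing in for the transport's AuthorizedSession and an illustrative helper name:

    import requests
    from google.api_core import rest_helpers

    def send_transcoded(host, session, transcoded_request, query_params,
                        metadata=(), timeout=None, body=None, stream=False):
        # path_template.transcode() already resolved the verb, URI, and body.
        uri = transcoded_request["uri"]
        method = transcoded_request["method"]  # e.g. "post", "get", "patch"
        headers = dict(metadata)
        headers["Content-Type"] = "application/json"
        # Dispatch to session.post / session.get / ... by name.
        return getattr(session, method)(
            "{host}{uri}".format(host=host, uri=uri),
            timeout=timeout,
            headers=headers,
            params=rest_helpers.flatten_query_params(query_params, strict=True),
            data=body,
            stream=stream,  # True only for the streaming RPCs
        )

The bodyless RPCs (GetDocument, DeleteDocument) simply omit data=body, and the streaming stubs (BatchGetDocuments, RunQuery, RunAggregationQuery) pass stream=True so the response can be consumed incrementally.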
Returns: ~.firestore.CommitResponse: @@ -989,45 +1182,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:commit", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_commit(request, metadata) - pb_request = firestore.CommitRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = _BaseFirestoreRestTransport._BaseCommit._get_http_options() - # Jsonify the request body + request, metadata = self._interceptor.pre_commit(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseCommit._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseCommit._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseCommit._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.Commit", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "Commit", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._Commit._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1040,22 +1250,59 @@ def __call__( pb_resp = firestore.CommitResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_commit(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore.CommitResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.commit", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "Commit", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _CreateDocument(FirestoreRestStub): + class _CreateDocument( + _BaseFirestoreRestTransport._BaseCreateDocument, 
FirestoreRestStub + ): def __hash__(self): - return hash("CreateDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.CreateDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1063,7 +1310,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> document.Document: r"""Call the create document method over HTTP. @@ -1074,8 +1321,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.document.Document: @@ -1085,45 +1334,66 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}", - "body": "document", - }, - ] - request, metadata = self._interceptor.pre_create_document(request, metadata) - pb_request = firestore.CreateDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreRestTransport._BaseCreateDocument._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_create_document(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseCreateDocument._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = ( + _BaseFirestoreRestTransport._BaseCreateDocument._get_request_body_json( + transcoded_request + ) ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseCreateDocument._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + 
"payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.CreateDocument", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "CreateDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._CreateDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1136,22 +1406,58 @@ def __call__( pb_resp = document.Document.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_document(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = document.Document.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.create_document", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "CreateDocument", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _DeleteDocument(FirestoreRestStub): + class _DeleteDocument( + _BaseFirestoreRestTransport._BaseDeleteDocument, FirestoreRestStub + ): def __hash__(self): - return hash("DeleteDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.DeleteDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1159,7 +1465,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete document method over HTTP. @@ -1170,42 +1476,65 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/documents/*/**}", - }, - ] - request, metadata = self._interceptor.pre_delete_document(request, metadata) - pb_request = firestore.DeleteDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreRestTransport._BaseDeleteDocument._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_delete_document(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseDeleteDocument._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseDeleteDocument._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.DeleteDocument", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "DeleteDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreRestTransport._DeleteDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1213,19 +1542,31 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _GetDocument(FirestoreRestStub): + class _GetDocument(_BaseFirestoreRestTransport._BaseGetDocument, FirestoreRestStub): def __hash__(self): - return hash("GetDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.GetDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1233,7 +1574,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> document.Document: r"""Call the get document method over HTTP. @@ -1244,8 +1585,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.document.Document: @@ -1255,38 +1598,59 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/documents/*/**}", - }, - ] - request, metadata = self._interceptor.pre_get_document(request, metadata) - pb_request = firestore.GetDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreRestTransport._BaseGetDocument._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_get_document(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseGetDocument._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseGetDocument._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.GetDocument", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "GetDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreRestTransport._GetDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1299,22 +1663,59 @@ def __call__( pb_resp = document.Document.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = 
self._interceptor.post_get_document(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = document.Document.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.get_document", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "GetDocument", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListCollectionIds(FirestoreRestStub): + class _ListCollectionIds( + _BaseFirestoreRestTransport._BaseListCollectionIds, FirestoreRestStub + ): def __hash__(self): - return hash("ListCollectionIds") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.ListCollectionIds") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1322,7 +1723,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.ListCollectionIdsResponse: r"""Call the list collection ids method over HTTP. @@ -1333,8 +1734,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
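All of the logging blocks threaded through these stubs are gated the same way: they run only when google-api-core advertises client logging support (CLIENT_LOGGING_SUPPORTED) and this module's logger is enabled for DEBUG. A sketch of switching them on from application code; the logger name is inferred from this module's path, so treat it as an assumption:

    import logging

    logging.basicConfig()
    logging.getLogger(
        "google.cloud.firestore_v1.services.firestore.transports.rest"
    ).setLevel(logging.DEBUG)
    # Each RPC then emits paired records such as
    #   Sending request for google.firestore_v1.FirestoreClient.GetDocument
    #   Received response for google.firestore_v1.FirestoreClient.get_document
    # carrying structured httpRequest/httpResponse dicts in the record extras.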
Returns: ~.firestore.ListCollectionIdsResponse: @@ -1343,52 +1746,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:listCollectionIds", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseListCollectionIds._get_http_options() + ) + request, metadata = self._interceptor.pre_list_collection_ids( request, metadata ) - pb_request = firestore.ListCollectionIdsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreRestTransport._BaseListCollectionIds._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseListCollectionIds._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreRestTransport._BaseListCollectionIds._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.ListCollectionIds", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "ListCollectionIds", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._ListCollectionIds._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1401,22 +1814,60 @@ def __call__( pb_resp = firestore.ListCollectionIdsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_collection_ids(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore.ListCollectionIdsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.list_collection_ids", + extra={ + "serviceName": "google.firestore.v1.Firestore", + 
"rpcName": "ListCollectionIds", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListDocuments(FirestoreRestStub): + class _ListDocuments( + _BaseFirestoreRestTransport._BaseListDocuments, FirestoreRestStub + ): def __hash__(self): - return hash("ListDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.ListDocuments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1424,7 +1875,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.ListDocumentsResponse: r"""Call the list documents method over HTTP. @@ -1435,8 +1886,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.firestore.ListDocumentsResponse: @@ -1445,42 +1898,59 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}", - }, - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*/documents}/{collection_id}", - }, - ] - request, metadata = self._interceptor.pre_list_documents(request, metadata) - pb_request = firestore.ListDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreRestTransport._BaseListDocuments._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_list_documents(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseListDocuments._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseListDocuments._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.ListDocuments", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "ListDocuments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = FirestoreRestTransport._ListDocuments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1493,12 +1963,34 @@ def __call__( pb_resp = firestore.ListDocumentsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_documents(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore.ListDocumentsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.list_documents", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "ListDocuments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _Listen(FirestoreRestStub): + class _Listen(_BaseFirestoreRestTransport._BaseListen, FirestoreRestStub): def __hash__(self): - return 
hash("Listen") + return hash("FirestoreRestTransport.Listen") def __call__( self, @@ -1506,25 +1998,40 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> rest_streaming.ResponseIterator: raise NotImplementedError( "Method Listen is not available over REST transport" ) - class _PartitionQuery(FirestoreRestStub): + class _PartitionQuery( + _BaseFirestoreRestTransport._BasePartitionQuery, FirestoreRestStub + ): def __hash__(self): - return hash("PartitionQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.PartitionQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1532,7 +2039,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> firestore.PartitionQueryResponse: r"""Call the partition query method over HTTP. @@ -1543,8 +2050,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.firestore.PartitionQueryResponse: @@ -1553,50 +2062,66 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:partitionQuery", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:partitionQuery", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_partition_query(request, metadata) - pb_request = firestore.PartitionQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreRestTransport._BasePartitionQuery._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_partition_query(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BasePartitionQuery._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = ( + _BaseFirestoreRestTransport._BasePartitionQuery._get_request_body_json( + transcoded_request + ) ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BasePartitionQuery._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.PartitionQuery", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "PartitionQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._PartitionQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1609,22 +2134,59 @@ def __call__( pb_resp = firestore.PartitionQueryResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_partition_query(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firestore.PartitionQueryResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.partition_query", + extra={ + 
"serviceName": "google.firestore.v1.Firestore", + "rpcName": "PartitionQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _Rollback(FirestoreRestStub): + class _Rollback(_BaseFirestoreRestTransport._BaseRollback, FirestoreRestStub): def __hash__(self): - return hash("Rollback") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.Rollback") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1632,7 +2194,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the rollback method over HTTP. @@ -1643,49 +2205,68 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:rollback", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_rollback(request, metadata) - pb_request = firestore.RollbackRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = _BaseFirestoreRestTransport._BaseRollback._get_http_options() - # Jsonify the request body + request, metadata = self._interceptor.pre_rollback(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseRollback._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseRollback._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseRollback._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.Rollback", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "Rollback", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._Rollback._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1693,19 +2274,35 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _RunAggregationQuery(FirestoreRestStub): + class _RunAggregationQuery( + _BaseFirestoreRestTransport._BaseRunAggregationQuery, FirestoreRestStub + ): def __hash__(self): - return hash("RunAggregationQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.RunAggregationQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response def __call__( self, @@ -1713,7 +2310,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> rest_streaming.ResponseIterator: r"""Call the run aggregation query method over HTTP. @@ -1724,8 +2321,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.firestore.RunAggregationQueryResponse: @@ -1734,52 +2333,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:runAggregationQuery", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:runAggregationQuery", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_http_options() + ) + request, metadata = self._interceptor.pre_run_aggregation_query( request, metadata ) - pb_request = firestore.RunAggregationQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.RunAggregationQuery", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "RunAggregationQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, 
uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._RunAggregationQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1791,22 +2400,37 @@ def __call__( resp = rest_streaming.ResponseIterator( response, firestore.RunAggregationQueryResponse ) + resp = self._interceptor.post_run_aggregation_query(resp) return resp - class _RunQuery(FirestoreRestStub): + class _RunQuery(_BaseFirestoreRestTransport._BaseRunQuery, FirestoreRestStub): def __hash__(self): - return hash("RunQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.RunQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response def __call__( self, @@ -1814,7 +2438,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> rest_streaming.ResponseIterator: r"""Call the run query method over HTTP. @@ -1825,8 +2449,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
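RunQuery, like BatchGetDocuments and RunAggregationQuery, stays server-streaming over REST: the stub opens the HTTP request with stream=True and hands it to rest_streaming.ResponseIterator. From the generated client that surfaces as a plain iterator; a sketch with placeholder resource names:

    from google.cloud.firestore_v1.services.firestore import FirestoreClient
    from google.cloud.firestore_v1.types import RunQueryRequest, StructuredQuery

    client = FirestoreClient(transport="rest")
    request = RunQueryRequest(
        parent="projects/my-project/databases/(default)/documents",
        structured_query=StructuredQuery(
            from_=[StructuredQuery.CollectionSelector(collection_id="users")],
        ),
    )
    # Each iteration parses one RunQueryResponse chunk off the open stream.
    for response in client.run_query(request=request):
        if response.document:
            print(response.document.name)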
Returns: ~.firestore.RunQueryResponse: @@ -1835,50 +2461,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:runQuery", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_run_query(request, metadata) - pb_request = firestore.RunQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = _BaseFirestoreRestTransport._BaseRunQuery._get_http_options() - # Jsonify the request body + request, metadata = self._interceptor.pre_run_query(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseRunQuery._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseFirestoreRestTransport._BaseRunQuery._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseRunQuery._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.RunQuery", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "RunQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._RunQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1888,22 +2526,38 @@ def __call__( # Return the response resp = rest_streaming.ResponseIterator(response, firestore.RunQueryResponse) + resp = self._interceptor.post_run_query(resp) return resp - class _UpdateDocument(FirestoreRestStub): + class _UpdateDocument( + _BaseFirestoreRestTransport._BaseUpdateDocument, FirestoreRestStub + ): def __hash__(self): - return hash("UpdateDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("FirestoreRestTransport.UpdateDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + 
timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1911,7 +2565,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gf_document.Document: r"""Call the update document method over HTTP. @@ -1922,8 +2576,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.gf_document.Document: @@ -1933,45 +2589,66 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{document.name=projects/*/databases/*/documents/*/**}", - "body": "document", - }, - ] - request, metadata = self._interceptor.pre_update_document(request, metadata) - pb_request = firestore.UpdateDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseFirestoreRestTransport._BaseUpdateDocument._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_update_document(request, metadata) + transcoded_request = ( + _BaseFirestoreRestTransport._BaseUpdateDocument._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = ( + _BaseFirestoreRestTransport._BaseUpdateDocument._get_request_body_json( + transcoded_request + ) ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseFirestoreRestTransport._BaseUpdateDocument._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.UpdateDocument", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "UpdateDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] 
= "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = FirestoreRestTransport._UpdateDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1984,12 +2661,34 @@ def __call__( pb_resp = gf_document.Document.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_document(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gf_document.Document.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.update_document", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "UpdateDocument", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _Write(FirestoreRestStub): + class _Write(_BaseFirestoreRestTransport._BaseWrite, FirestoreRestStub): def __hash__(self): - return hash("Write") + return hash("FirestoreRestTransport.Write") def __call__( self, @@ -1997,7 +2696,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> rest_streaming.ResponseIterator: raise NotImplementedError( "Method Write is not available over REST transport" @@ -2135,14 +2834,42 @@ def write(self) -> Callable[[firestore.WriteRequest], firestore.WriteResponse]: def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(FirestoreRestStub): + class _CancelOperation( + _BaseFirestoreRestTransport._BaseCancelOperation, FirestoreRestStub + ): + def __hash__(self): + return hash("FirestoreRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -2152,41 +2879,72 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*/operations/*}:cancel", - "body": "*", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseCancelOperation._get_http_options() + ) request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = _BaseFirestoreRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = ( + _BaseFirestoreRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseFirestoreRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.CancelOperation", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + # Send the request + response = FirestoreRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2200,14 +2958,41 @@ def __call__( def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(FirestoreRestStub): + class _DeleteOperation( + _BaseFirestoreRestTransport._BaseDeleteOperation, FirestoreRestStub + ): + def __hash__(self): + return hash("FirestoreRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: 
Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the delete operation method over HTTP. @@ -2217,38 +3002,65 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseDeleteOperation._get_http_options() + ) request, metadata = self._interceptor.pre_delete_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseFirestoreRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseFirestoreRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.DeleteOperation", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + # Send the request + response = FirestoreRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2262,14 +3074,41 @@ def __call__( def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(FirestoreRestStub): + class _GetOperation( + _BaseFirestoreRestTransport._BaseGetOperation, FirestoreRestStub + ): + def __hash__(self): + return hash("FirestoreRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, 
method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -2279,39 +3118,68 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseFirestoreRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseFirestoreRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.GetOperation", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + # Send the request + response = FirestoreRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2319,23 +3187,72 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + 
resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreAsyncClient.GetOperation", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(FirestoreRestStub): + class _ListOperations( + _BaseFirestoreRestTransport._BaseListOperations, FirestoreRestStub + ): + def __hash__(self): + return hash("FirestoreRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -2345,39 +3262,68 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
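All of the REST stubs above compose the same `_Base*` helpers in the same order; a minimal sketch of that pipeline, eliding interceptors, debug logging, and error mapping (`stub_base`, `session`, and `_sketch_call` are stand-in names for illustration, not symbols from this module):

    import requests

    def _sketch_call(stub_base, request, session: requests.Session, host: str, metadata):
        # 1. Static HTTP rules for the RPC: verb, URI template, body field.
        http_options = stub_base._get_http_options()
        # 2. Match the request against a rule; split into uri/method/body/query.
        transcoded = stub_base._get_transcoded_request(http_options, request)
        # 3. JSON-encode the query params (and the body, for verbs that have one).
        query_params = stub_base._get_query_params_json(transcoded)
        body = (
            stub_base._get_request_body_json(transcoded)
            if hasattr(stub_base, "_get_request_body_json")
            else None
        )
        # 4. Send it; each concrete stub's _get_response does essentially this.
        headers = dict(metadata)
        headers["Content-Type"] = "application/json"
        return getattr(session, transcoded["method"])(
            "{host}{uri}".format(host=host, uri=transcoded["uri"]),
            headers=headers,
            params=query_params,
            data=body,
        )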
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*}/operations", - }, - ] + http_options = ( + _BaseFirestoreRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseFirestoreRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseFirestoreRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.ListOperations", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + # Send the request + response = FirestoreRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2385,9 +3331,31 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreAsyncClient.ListOperations", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py new file mode 100644 index 0000000000..0b55ef7f59 --- /dev/null +++ b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py @@ -0,0 +1,1004 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import FirestoreTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseFirestoreRestTransport(FirestoreTransport): + """Base REST backend transport for Firestore. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "firestore.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'firestore.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseBatchGetDocuments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{database=projects/*/databases/*}/documents:batchGet", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.BatchGetDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBatchWrite: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{database=projects/*/databases/*}/documents:batchWrite", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.BatchWriteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseBatchWrite._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBeginTransaction: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{database=projects/*/databases/*}/documents:beginTransaction", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.BeginTransactionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseBeginTransaction._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCommit: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{database=projects/*/databases/*}/documents:commit", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.CommitRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseCommit._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}", + "body": "document", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.CreateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return 
transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseCreateDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/databases/*/documents/*/**}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.DeleteDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseDeleteDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*/documents/*/**}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.GetDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseGetDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListCollectionIds: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": 
"/v1/{parent=projects/*/databases/*/documents}:listCollectionIds", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.ListCollectionIdsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseListCollectionIds._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDocuments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/databases/*/documents}/{collection_id}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.ListDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseListDocuments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListen: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BasePartitionQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents}:partitionQuery", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:partitionQuery", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.PartitionQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BasePartitionQuery._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRollback: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{database=projects/*/databases/*}/documents:rollback", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.RollbackRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseRollback._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunAggregationQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents}:runAggregationQuery", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:runAggregationQuery", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.RunAggregationQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseRunQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents}:runQuery", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.RunQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseRunQuery._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{document.name=projects/*/databases/*/documents/*/**}", + "body": "document", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.UpdateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseUpdateDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseWrite: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/databases/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/databases/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/databases/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseFirestoreRestTransport",) diff --git a/scripts/fixup_firestore_admin_v1_keywords.py b/scripts/fixup_firestore_admin_v1_keywords.py index 1c2d4ec8d8..1c24796702 100644 --- a/scripts/fixup_firestore_admin_v1_keywords.py +++ b/scripts/fixup_firestore_admin_v1_keywords.py @@ -60,7 +60,7 @@ class firestore_adminCallTransformer(cst.CSTTransformer): 'get_field': ('name', ), 'get_index': ('name', ), 'import_documents': ('name', 'collection_ids', 'input_uri_prefix', 'namespace_ids', ), - 'list_backups': ('parent', ), + 'list_backups': ('parent', 'filter', ), 'list_backup_schedules': ('parent', ), 'list_databases': ('parent', 'show_deleted', ), 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ), diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py 
b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py index 8353d5b180..63e24e25f5 100644 --- a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -79,10 +86,24 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -322,86 +343,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1180,25 +1121,6 @@ def test_create_index(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_create_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest() - - def test_create_index_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1267,27 +1189,6 @@ def test_create_index_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest() - - @pytest.mark.asyncio async def test_create_index_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1296,7 +1197,7 @@ async def test_create_index_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1340,7 +1241,7 @@ async def test_create_index_async( transport: str = "grpc_asyncio", request_type=firestore_admin.CreateIndexRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1403,7 +1304,7 @@ def test_create_index_field_headers(): @pytest.mark.asyncio async def test_create_index_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1478,7 +1379,7 @@ def test_create_index_flattened_error(): @pytest.mark.asyncio async def test_create_index_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1511,7 +1412,7 @@ async def test_create_index_flattened_async(): @pytest.mark.asyncio async def test_create_index_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1560,25 +1461,6 @@ def test_list_indexes(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_indexes_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_indexes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest() - - def test_list_indexes_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1646,29 +1528,6 @@ def test_list_indexes_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_indexes_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_indexes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest() - - @pytest.mark.asyncio async def test_list_indexes_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1677,7 +1536,7 @@ async def test_list_indexes_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1716,7 +1575,7 @@ async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ListIndexesRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1782,7 +1641,7 @@ def test_list_indexes_field_headers(): @pytest.mark.asyncio async def test_list_indexes_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1852,7 +1711,7 @@ def test_list_indexes_flattened_error(): @pytest.mark.asyncio async def test_list_indexes_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1881,7 +1740,7 @@ async def test_list_indexes_flattened_async(): @pytest.mark.asyncio async def test_list_indexes_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1991,7 +1850,7 @@ def test_list_indexes_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_indexes_async_pager(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2041,7 +1900,7 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2129,25 +1988,6 @@ def test_get_index(request_type, transport: str = "grpc"): assert response.state == index.Index.State.CREATING -def test_get_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest() - - def test_get_index_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2211,39 +2051,13 @@ def test_get_index_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - index.Index( - name="name_value", - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - ) - ) - response = await client.get_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest() - - @pytest.mark.asyncio async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2282,7 +2096,7 @@ async def test_get_index_async( transport: str = "grpc_asyncio", request_type=firestore_admin.GetIndexRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2354,7 +2168,7 @@ def test_get_index_field_headers(): @pytest.mark.asyncio async def test_get_index_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2422,7 +2236,7 @@ def test_get_index_flattened_error(): @pytest.mark.asyncio async def test_get_index_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
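A minimal sketch (not part of the diff) of the substitution running through all of these async hunks: FirestoreAdminAsyncClient instances are now built with the async_anonymous_credentials() helper added near the top of this module, which returns google.auth.aio anonymous credentials when HAS_GOOGLE_AUTH_AIO is true and falls back to the sync AnonymousCredentials otherwise.

from google.cloud.firestore_admin_v1 import FirestoreAdminAsyncClient

# async_anonymous_credentials() is the module-level helper introduced by this
# patch; the transport string selects the asyncio gRPC channel.
client = FirestoreAdminAsyncClient(
    credentials=async_anonymous_credentials(),
    transport="grpc_asyncio",
)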
@@ -2449,7 +2263,7 @@ async def test_get_index_flattened_async(): @pytest.mark.asyncio async def test_get_index_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2494,25 +2308,6 @@ def test_delete_index(request_type, transport: str = "grpc"): assert response is None -def test_delete_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest() - - def test_delete_index_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2576,25 +2371,6 @@ def test_delete_index_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest() - - @pytest.mark.asyncio async def test_delete_index_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2603,7 +2379,7 @@ async def test_delete_index_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2642,7 +2418,7 @@ async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteIndexRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2703,7 +2479,7 @@ def test_delete_index_field_headers(): @pytest.mark.asyncio async def test_delete_index_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2771,7 +2547,7 @@ def test_delete_index_flattened_error(): @pytest.mark.asyncio async def test_delete_index_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2798,7 +2574,7 @@ async def test_delete_index_flattened_async(): @pytest.mark.asyncio async def test_delete_index_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2846,25 +2622,6 @@ def test_get_field(request_type, transport: str = "grpc"): assert response.name == "name_value" -def test_get_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest() - - def test_get_field_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2928,36 +2685,13 @@ def test_get_field_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_field_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - field.Field( - name="name_value", - ) - ) - response = await client.get_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest() - - @pytest.mark.asyncio async def test_get_field_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2996,7 +2730,7 @@ async def test_get_field_async( transport: str = "grpc_asyncio", request_type=firestore_admin.GetFieldRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3062,7 +2796,7 @@ def test_get_field_field_headers(): @pytest.mark.asyncio async def test_get_field_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3130,7 +2864,7 @@ def test_get_field_flattened_error(): @pytest.mark.asyncio async def test_get_field_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3157,7 +2891,7 @@ async def test_get_field_flattened_async(): @pytest.mark.asyncio async def test_get_field_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3202,25 +2936,6 @@ def test_update_field(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_update_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() - - def test_update_field_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3285,27 +3000,6 @@ def test_update_field_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_field_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() - - @pytest.mark.asyncio async def test_update_field_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3314,7 +3008,7 @@ async def test_update_field_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3358,7 +3052,7 @@ async def test_update_field_async( transport: str = "grpc_asyncio", request_type=firestore_admin.UpdateFieldRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3421,7 +3115,7 @@ def test_update_field_field_headers(): @pytest.mark.asyncio async def test_update_field_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3491,7 +3185,7 @@ def test_update_field_flattened_error(): @pytest.mark.asyncio async def test_update_field_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3520,7 +3214,7 @@ async def test_update_field_flattened_async(): @pytest.mark.asyncio async def test_update_field_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3568,25 +3262,6 @@ def test_list_fields(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_fields_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_fields() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest() - - def test_list_fields_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
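For orientation, a condensed sketch of the stub-mocking idiom that both the deleted empty-call tests and the remaining tests rely on: patching the transport method's __call__ keeps the client off the network, and the captured mock call exposes the request that was actually sent. Names mirror the surrounding list_fields tests; the parent value is a placeholder.

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.firestore_admin_v1 import FirestoreAdminClient
from google.cloud.firestore_admin_v1.types import firestore_admin

client = FirestoreAdminClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
)
with mock.patch.object(type(client.transport.list_fields), "__call__") as call:
    call.return_value = firestore_admin.ListFieldsResponse(
        next_page_token="next_page_token_value"
    )
    client.list_fields(request=firestore_admin.ListFieldsRequest(parent="parent_value"))
    # the stub received exactly the request the client constructed
    _, args, _ = call.mock_calls[0]
    assert args[0] == firestore_admin.ListFieldsRequest(parent="parent_value")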
@@ -3654,29 +3329,6 @@ def test_list_fields_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_fields_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListFieldsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_fields() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest() - - @pytest.mark.asyncio async def test_list_fields_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3685,7 +3337,7 @@ async def test_list_fields_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3724,7 +3376,7 @@ async def test_list_fields_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ListFieldsRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3790,7 +3442,7 @@ def test_list_fields_field_headers(): @pytest.mark.asyncio async def test_list_fields_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3860,7 +3512,7 @@ def test_list_fields_flattened_error(): @pytest.mark.asyncio async def test_list_fields_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3889,7 +3541,7 @@ async def test_list_fields_flattened_async(): @pytest.mark.asyncio async def test_list_fields_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3999,7 +3651,7 @@ def test_list_fields_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_fields_async_pager(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4049,7 +3701,7 @@ async def test_list_fields_async_pager(): @pytest.mark.asyncio async def test_list_fields_async_pages(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
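A short sketch, under the same mocked-transport assumptions, of the async paging behavior that test_list_fields_async_pager exercises above: awaiting list_fields returns an async pager, and iterating it with `async for` fetches follow-up pages lazily. The parent path below is a placeholder.

async def collect_field_names(client):
    # `client` is assumed to be a FirestoreAdminAsyncClient
    pager = await client.list_fields(
        request={"parent": "projects/p/databases/d/collectionGroups/c"}
    )
    return [field.name async for field in pager]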
@@ -4128,25 +3780,6 @@ def test_export_documents(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_export_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.export_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest() - - def test_export_documents_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4219,27 +3852,6 @@ def test_export_documents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_export_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.export_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest() - - @pytest.mark.asyncio async def test_export_documents_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4248,7 +3860,7 @@ async def test_export_documents_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4292,7 +3904,7 @@ async def test_export_documents_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ExportDocumentsRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4355,7 +3967,7 @@ def test_export_documents_field_headers(): @pytest.mark.asyncio async def test_export_documents_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4425,7 +4037,7 @@ def test_export_documents_flattened_error(): @pytest.mark.asyncio async def test_export_documents_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
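For context on the isinstance(response, future.Future) assertions in the export/import tests: these admin RPCs start server-side work and return a long-running operation. A hedged usage sketch follows (the database name is a placeholder):

# export_documents returns a google.api_core.operation.Operation (a Future);
# result() would block until the server-side export completes or fails.
operation = client.export_documents(name="projects/p/databases/(default)")
response = operation.result()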
@@ -4454,7 +4066,7 @@ async def test_export_documents_flattened_async(): @pytest.mark.asyncio async def test_export_documents_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4499,25 +4111,6 @@ def test_import_documents(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_import_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.import_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest() - - def test_import_documents_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4590,27 +4183,6 @@ def test_import_documents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_import_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.import_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest() - - @pytest.mark.asyncio async def test_import_documents_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4619,7 +4191,7 @@ async def test_import_documents_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4663,7 +4235,7 @@ async def test_import_documents_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ImportDocumentsRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4726,7 +4298,7 @@ def test_import_documents_field_headers(): @pytest.mark.asyncio async def test_import_documents_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4796,7 +4368,7 @@ def test_import_documents_flattened_error(): @pytest.mark.asyncio async def test_import_documents_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4825,7 +4397,7 @@ async def test_import_documents_flattened_async(): @pytest.mark.asyncio async def test_import_documents_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4872,27 +4444,6 @@ def test_bulk_delete_documents(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_bulk_delete_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.bulk_delete_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.BulkDeleteDocumentsRequest() - - def test_bulk_delete_documents_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4968,29 +4519,6 @@ def test_bulk_delete_documents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_bulk_delete_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.bulk_delete_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.BulkDeleteDocumentsRequest() - - @pytest.mark.asyncio async def test_bulk_delete_documents_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4999,7 +4527,7 @@ async def test_bulk_delete_documents_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5044,7 +4572,7 @@ async def test_bulk_delete_documents_async( request_type=firestore_admin.BulkDeleteDocumentsRequest, ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5111,7 +4639,7 @@ def test_bulk_delete_documents_field_headers(): @pytest.mark.asyncio async def test_bulk_delete_documents_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5185,7 +4713,7 @@ def test_bulk_delete_documents_flattened_error(): @pytest.mark.asyncio async def test_bulk_delete_documents_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5216,7 +4744,7 @@ async def test_bulk_delete_documents_flattened_async(): @pytest.mark.asyncio async def test_bulk_delete_documents_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5261,25 +4789,6 @@ def test_create_database(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_create_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateDatabaseRequest() - - def test_create_database_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
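A compact sketch of the rule the *_flattened_error tests in this area assert: a GAPIC method refuses a populated request object combined with flattened keyword arguments and raises ValueError. This mirrors the surviving sync test for create_database.

import pytest

from google.cloud.firestore_admin_v1.types import database as gfa_database
from google.cloud.firestore_admin_v1.types import firestore_admin

with pytest.raises(ValueError):
    client.create_database(
        firestore_admin.CreateDatabaseRequest(),
        parent="parent_value",
        database=gfa_database.Database(name="name_value"),
        database_id="database_id_value",
    )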
@@ -5350,27 +4859,6 @@ def test_create_database_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateDatabaseRequest() - - @pytest.mark.asyncio async def test_create_database_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -5379,7 +4867,7 @@ async def test_create_database_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5423,7 +4911,7 @@ async def test_create_database_async( transport: str = "grpc_asyncio", request_type=firestore_admin.CreateDatabaseRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5486,7 +4974,7 @@ def test_create_database_field_headers(): @pytest.mark.asyncio async def test_create_database_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5566,7 +5054,7 @@ def test_create_database_flattened_error(): @pytest.mark.asyncio async def test_create_database_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5603,7 +5091,7 @@ async def test_create_database_flattened_async(): @pytest.mark.asyncio async def test_create_database_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5682,25 +5170,6 @@ def test_get_database(request_type, transport: str = "grpc"): assert response.etag == "etag_value" -def test_get_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetDatabaseRequest() - - def test_get_database_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5764,39 +5233,6 @@ def test_get_database_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - database.Database( - name="name_value", - uid="uid_value", - location_id="location_id_value", - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix="key_prefix_value", - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - previous_id="previous_id_value", - etag="etag_value", - ) - ) - response = await client.get_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetDatabaseRequest() - - @pytest.mark.asyncio async def test_get_database_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -5805,7 +5241,7 @@ async def test_get_database_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5844,7 +5280,7 @@ async def test_get_database_async( transport: str = "grpc_asyncio", request_type=firestore_admin.GetDatabaseRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5939,7 +5375,7 @@ def test_get_database_field_headers(): @pytest.mark.asyncio async def test_get_database_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6007,7 +5443,7 @@ def test_get_database_flattened_error(): @pytest.mark.asyncio async def test_get_database_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
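And a sketch of what the *_field_headers tests around get_database verify: the resource name in the request is echoed into gRPC metadata as x-goog-request-params so the Firestore backend can route the call.

from google.cloud.firestore_admin_v1.types import database

with mock.patch.object(type(client.transport.get_database), "__call__") as call:
    call.return_value = database.Database()
    client.get_database(request=firestore_admin.GetDatabaseRequest(name="name_value"))
    _, _, kw = call.mock_calls[0]
    # routing metadata is derived from the request's `name` field
    assert ("x-goog-request-params", "name=name_value") in kw["metadata"]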
@@ -6034,7 +5470,7 @@ async def test_get_database_flattened_async(): @pytest.mark.asyncio async def test_get_database_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6082,25 +5518,6 @@ def test_list_databases(request_type, transport: str = "grpc"): assert response.unreachable == ["unreachable_value"] -def test_list_databases_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_databases() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListDatabasesRequest() - - def test_list_databases_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6164,29 +5581,6 @@ def test_list_databases_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_databases_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListDatabasesResponse( - unreachable=["unreachable_value"], - ) - ) - response = await client.list_databases() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListDatabasesRequest() - - @pytest.mark.asyncio async def test_list_databases_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6195,7 +5589,7 @@ async def test_list_databases_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6234,7 +5628,7 @@ async def test_list_databases_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ListDatabasesRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6300,7 +5694,7 @@ def test_list_databases_field_headers(): @pytest.mark.asyncio async def test_list_databases_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6370,7 +5764,7 @@ def test_list_databases_flattened_error(): @pytest.mark.asyncio async def test_list_databases_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6399,7 +5793,7 @@ async def test_list_databases_flattened_async(): @pytest.mark.asyncio async def test_list_databases_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6444,25 +5838,6 @@ def test_update_database(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_update_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateDatabaseRequest() - - def test_update_database_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6527,27 +5902,6 @@ def test_update_database_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateDatabaseRequest() - - @pytest.mark.asyncio async def test_update_database_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6556,7 +5910,7 @@ async def test_update_database_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6600,7 +5954,7 @@ async def test_update_database_async( transport: str = "grpc_asyncio", request_type=firestore_admin.UpdateDatabaseRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6663,7 +6017,7 @@ def test_update_database_field_headers(): @pytest.mark.asyncio async def test_update_database_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6738,7 +6092,7 @@ def test_update_database_flattened_error(): @pytest.mark.asyncio async def test_update_database_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6771,7 +6125,7 @@ async def test_update_database_flattened_async(): @pytest.mark.asyncio async def test_update_database_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6817,25 +6171,6 @@ def test_delete_database(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_delete_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteDatabaseRequest() - - def test_delete_database_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
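Finally, a sketch of the caching behavior the *_use_cached_wrapped_rpc tests pin down: the client wraps every transport method once at construction time (via _prep_wrapped_messages) and reuses those wrappers, so no re-wrapping happens per call.

with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
    client = FirestoreAdminClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
    )
    # wrapping happened eagerly during client construction
    assert wrapper_fn.call_count > 0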
@@ -6906,27 +6241,6 @@ def test_delete_database_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.delete_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteDatabaseRequest() - - @pytest.mark.asyncio async def test_delete_database_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6935,7 +6249,7 @@ async def test_delete_database_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6979,7 +6293,7 @@ async def test_delete_database_async( transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteDatabaseRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7042,7 +6356,7 @@ def test_delete_database_field_headers(): @pytest.mark.asyncio async def test_delete_database_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7112,7 +6426,7 @@ def test_delete_database_flattened_error(): @pytest.mark.asyncio async def test_delete_database_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7141,7 +6455,7 @@ async def test_delete_database_flattened_async(): @pytest.mark.asyncio async def test_delete_database_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7195,25 +6509,6 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.state == backup.Backup.State.CREATING -def test_get_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupRequest() - - def test_get_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7277,39 +6572,13 @@ def test_get_backup_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_backup_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backup.Backup( - name="name_value", - database="database_value", - database_uid="database_uid_value", - state=backup.Backup.State.CREATING, - ) - ) - response = await client.get_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupRequest() - - @pytest.mark.asyncio async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7348,7 +6617,7 @@ async def test_get_backup_async( transport: str = "grpc_asyncio", request_type=firestore_admin.GetBackupRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7420,7 +6689,7 @@ def test_get_backup_field_headers(): @pytest.mark.asyncio async def test_get_backup_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7488,7 +6757,7 @@ def test_get_backup_flattened_error(): @pytest.mark.asyncio async def test_get_backup_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7515,7 +6784,7 @@ async def test_get_backup_flattened_async(): @pytest.mark.asyncio async def test_get_backup_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7563,25 +6832,6 @@ def test_list_backups(request_type, transport: str = "grpc"): assert response.unreachable == ["unreachable_value"] -def test_list_backups_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_backups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupsRequest() - - def test_list_backups_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7595,6 +6845,7 @@ def test_list_backups_non_empty_request_with_auto_populated_field(): # if they meet the requirements of AIP 4235. request = firestore_admin.ListBackupsRequest( parent="parent_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7607,6 +6858,7 @@ def test_list_backups_non_empty_request_with_auto_populated_field(): _, args, _ = call.mock_calls[0] assert args[0] == firestore_admin.ListBackupsRequest( parent="parent_value", + filter="filter_value", ) @@ -7645,29 +6897,6 @@ def test_list_backups_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_backups_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupsResponse( - unreachable=["unreachable_value"], - ) - ) - response = await client.list_backups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupsRequest() - - @pytest.mark.asyncio async def test_list_backups_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -7676,7 +6905,7 @@ async def test_list_backups_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7715,7 +6944,7 @@ async def test_list_backups_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ListBackupsRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7781,7 +7010,7 @@ def test_list_backups_field_headers(): @pytest.mark.asyncio async def test_list_backups_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7851,7 +7080,7 @@ def test_list_backups_flattened_error(): @pytest.mark.asyncio async def test_list_backups_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7880,7 +7109,7 @@ async def test_list_backups_flattened_async(): @pytest.mark.asyncio async def test_list_backups_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7925,25 +7154,6 @@ def test_delete_backup(request_type, transport: str = "grpc"): assert response is None -def test_delete_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupRequest() - - def test_delete_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8007,25 +7217,6 @@ def test_delete_backup_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_backup_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupRequest() - - @pytest.mark.asyncio async def test_delete_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -8034,7 +7225,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8073,7 +7264,7 @@ async def test_delete_backup_async( transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteBackupRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8134,7 +7325,7 @@ def test_delete_backup_field_headers(): @pytest.mark.asyncio async def test_delete_backup_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8202,7 +7393,7 @@ def test_delete_backup_flattened_error(): @pytest.mark.asyncio async def test_delete_backup_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8229,7 +7420,7 @@ async def test_delete_backup_flattened_async(): @pytest.mark.asyncio async def test_delete_backup_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8274,25 +7465,6 @@ def test_restore_database(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_restore_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.restore_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.RestoreDatabaseRequest() - - def test_restore_database_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -8367,27 +7539,6 @@ def test_restore_database_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_restore_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.restore_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.RestoreDatabaseRequest() - - @pytest.mark.asyncio async def test_restore_database_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -8396,7 +7547,7 @@ async def test_restore_database_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8440,7 +7591,7 @@ async def test_restore_database_async( transport: str = "grpc_asyncio", request_type=firestore_admin.RestoreDatabaseRequest ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8503,7 +7654,7 @@ def test_restore_database_field_headers(): @pytest.mark.asyncio async def test_restore_database_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8570,27 +7721,6 @@ def test_create_backup_schedule(request_type, transport: str = "grpc"): assert response.name == "name_value" -def test_create_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateBackupScheduleRequest() - - def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8661,31 +7791,6 @@ def test_create_backup_schedule_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - response = await client.create_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateBackupScheduleRequest() - - @pytest.mark.asyncio async def test_create_backup_schedule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -8694,7 +7799,7 @@ async def test_create_backup_schedule_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8734,7 +7839,7 @@ async def test_create_backup_schedule_async( request_type=firestore_admin.CreateBackupScheduleRequest, ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8804,7 +7909,7 @@ def test_create_backup_schedule_field_headers(): @pytest.mark.asyncio async def test_create_backup_schedule_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8883,7 +7988,7 @@ def test_create_backup_schedule_flattened_error(): @pytest.mark.asyncio async def test_create_backup_schedule_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8918,7 +8023,7 @@ async def test_create_backup_schedule_flattened_async(): @pytest.mark.asyncio async def test_create_backup_schedule_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8969,27 +8074,6 @@ def test_get_backup_schedule(request_type, transport: str = "grpc"): assert response.name == "name_value" -def test_get_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupScheduleRequest() - - def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9059,31 +8143,6 @@ def test_get_backup_schedule_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - response = await client.get_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupScheduleRequest() - - @pytest.mark.asyncio async def test_get_backup_schedule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -9092,7 +8151,7 @@ async def test_get_backup_schedule_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9132,7 +8191,7 @@ async def test_get_backup_schedule_async( request_type=firestore_admin.GetBackupScheduleRequest, ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9202,7 +8261,7 @@ def test_get_backup_schedule_field_headers(): @pytest.mark.asyncio async def test_get_backup_schedule_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9276,7 +8335,7 @@ def test_get_backup_schedule_flattened_error(): @pytest.mark.asyncio async def test_get_backup_schedule_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9307,7 +8366,7 @@ async def test_get_backup_schedule_flattened_async(): @pytest.mark.asyncio async def test_get_backup_schedule_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9354,27 +8413,6 @@ def test_list_backup_schedules(request_type, transport: str = "grpc"): assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) -def test_list_backup_schedules_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_backup_schedules() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupSchedulesRequest() - - def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9445,29 +8483,6 @@ def test_list_backup_schedules_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_backup_schedules_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupSchedulesResponse() - ) - response = await client.list_backup_schedules() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupSchedulesRequest() - - @pytest.mark.asyncio async def test_list_backup_schedules_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -9476,7 +8491,7 @@ async def test_list_backup_schedules_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9516,7 +8531,7 @@ async def test_list_backup_schedules_async( request_type=firestore_admin.ListBackupSchedulesRequest, ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9583,7 +8598,7 @@ def test_list_backup_schedules_field_headers(): @pytest.mark.asyncio async def test_list_backup_schedules_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9657,7 +8672,7 @@ def test_list_backup_schedules_flattened_error(): @pytest.mark.asyncio async def test_list_backup_schedules_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -9688,7 +8703,7 @@ async def test_list_backup_schedules_flattened_async(): @pytest.mark.asyncio async def test_list_backup_schedules_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9738,27 +8753,6 @@ def test_update_backup_schedule(request_type, transport: str = "grpc"): assert response.name == "name_value" -def test_update_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateBackupScheduleRequest() - - def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9825,31 +8819,6 @@ def test_update_backup_schedule_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - response = await client.update_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateBackupScheduleRequest() - - @pytest.mark.asyncio async def test_update_backup_schedule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -9858,7 +8827,7 @@ async def test_update_backup_schedule_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9898,7 +8867,7 @@ async def test_update_backup_schedule_async( request_type=firestore_admin.UpdateBackupScheduleRequest, ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9968,7 +8937,7 @@ def test_update_backup_schedule_field_headers(): @pytest.mark.asyncio async def test_update_backup_schedule_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10047,7 +9016,7 @@ def test_update_backup_schedule_flattened_error(): @pytest.mark.asyncio async def test_update_backup_schedule_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -10082,7 +9051,7 @@ async def test_update_backup_schedule_flattened_async(): @pytest.mark.asyncio async def test_update_backup_schedule_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -10130,27 +9099,6 @@ def test_delete_backup_schedule(request_type, transport: str = "grpc"): assert response is None -def test_delete_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupScheduleRequest() - - def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10221,27 +9169,6 @@ def test_delete_backup_schedule_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupScheduleRequest() - - @pytest.mark.asyncio async def test_delete_backup_schedule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -10250,7 +9177,7 @@ async def test_delete_backup_schedule_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10290,7 +9217,7 @@ async def test_delete_backup_schedule_async( request_type=firestore_admin.DeleteBackupScheduleRequest, ): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10355,7 +9282,7 @@ def test_delete_backup_schedule_field_headers(): @pytest.mark.asyncio async def test_delete_backup_schedule_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10427,7 +9354,7 @@ def test_delete_backup_schedule_flattened_error(): @pytest.mark.asyncio async def test_delete_backup_schedule_flattened_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -10456,7 +9383,7 @@ async def test_delete_backup_schedule_flattened_async(): @pytest.mark.asyncio async def test_delete_backup_schedule_flattened_error_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -10468,156 +9395,38 @@ async def test_delete_backup_schedule_flattened_error_async(): ) -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateIndexRequest, - dict, - ], -) -def test_create_index_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_create_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request_init["index"] = { - "name": "name_value", - "query_scope": 1, - "api_scope": 1, - "fields": [ - { - "field_path": "field_path_value", - "order": 1, - "array_config": 1, - "vector_config": {"dimension": 966, "flat": {}}, - } - ], - "state": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateIndexRequest.meta.fields["index"] + # Ensure method has been cached + assert client._transport.create_index in client._transport._wrapped_methods - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_index] = mock_rpc - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + request = {} + client.create_index(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["index"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["index"][field])): - del request_init["index"][field][i][subfield] - else: - del request_init["index"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_index(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_create_index_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_index in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_index] = mock_rpc - - request = {} - client.create_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() client.create_index(request) @@ -10690,6 +9499,7 @@ def test_create_index_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_index(request) @@ -10715,91 +9525,6 @@ def test_create_index_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_index" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_create_index" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.CreateIndexRequest.pb( - firestore_admin.CreateIndexRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.CreateIndexRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_index( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_index_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.CreateIndexRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_index(request) - - def test_create_index_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10829,6 +9554,7 @@ def test_create_index_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_index(**mock_args) @@ -10859,54 +9585,6 @@ def test_create_index_rest_flattened_error(transport: str = "rest"): ) -def test_create_index_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListIndexesRequest, - dict, - ], -) -def test_list_indexes_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_indexes(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_indexes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11017,6 +9695,7 @@ def test_list_indexes_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_indexes(request) @@ -11043,99 +9722,16 @@ def test_list_indexes_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_indexes_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( +def test_list_indexes_rest_flattened(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), + transport="rest", ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_indexes" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_indexes" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListIndexesRequest.pb( - firestore_admin.ListIndexesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListIndexesResponse.to_json( - firestore_admin.ListIndexesResponse() - ) - - request = firestore_admin.ListIndexesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListIndexesResponse() - - client.list_indexes( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_indexes_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.ListIndexesRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_indexes(request) - - -def test_list_indexes_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListIndexesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListIndexesResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -11156,6 +9752,7 @@ def test_list_indexes_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_indexes(**mock_args) @@ -11250,54 +9847,6 @@ def test_list_indexes_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetIndexRequest, - dict, - ], -) -def test_get_index_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = index.Index( - name="name_value", - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_index(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, index.Index) - assert response.name == "name_value" - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API - assert response.state == index.Index.State.CREATING - - def test_get_index_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11398,6 +9947,7 @@ def test_get_index_rest_required_fields(request_type=firestore_admin.GetIndexReq response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_index(request) @@ -11415,87 +9965,6 @@ def test_get_index_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_index" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_index" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetIndexRequest.pb( - firestore_admin.GetIndexRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = index.Index.to_json(index.Index()) - - request = firestore_admin.GetIndexRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = index.Index() - - client.get_index( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_index_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.GetIndexRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_index(request) - - def test_get_index_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11526,6 +9995,7 @@ def test_get_index_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_index(**mock_args) @@ -11555,49 +10025,6 @@ def test_get_index_rest_flattened_error(transport: str = "rest"): ) -def test_get_index_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteIndexRequest, - dict, - ], -) -def test_delete_index_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_index(request) - - # Establish that the response is the type that we expect. 
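# --- note on the *_flattened tests reshuffled in these hunks ---
# They exercise the convenience calling convention: keyword arguments such
# as name= are folded into a request message before the RPC runs, and the
# *_flattened_error variants check that mixing them with an explicit
# request raises. A rough sketch under that assumption; GetIndexRequest
# here is a stand-in dataclass, not the real protobuf type:

from dataclasses import dataclass
from typing import Optional

@dataclass
class GetIndexRequest:
    name: str = ""

def get_index(request: Optional[GetIndexRequest] = None, *, name: Optional[str] = None):
    if request is not None and name is not None:
        raise ValueError("pass either a request object or flattened arguments, not both")
    if request is None:
        request = GetIndexRequest(name=name or "")
    return request  # a real client would now hand this to the transport

req = get_index(name="projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4")
assert req.name.endswith("/indexes/sample4")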
- assert response is None - - def test_delete_index_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11697,6 +10124,7 @@ def test_delete_index_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_index(request) @@ -11714,102 +10142,27 @@ def test_delete_index_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( +def test_delete_index_rest_flattened(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), + transport="rest", ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_index" - ) as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteIndexRequest.pb( - firestore_admin.DeleteIndexRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = firestore_admin.DeleteIndexRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_index( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_index_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.DeleteIndexRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_index(request) - - -def test_delete_index_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() @@ -11817,6 +10170,7 @@ def test_delete_index_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_index(**mock_args) @@ -11846,54 +10200,6 @@ def test_delete_index_rest_flattened_error(transport: str = "rest"): ) -def test_delete_index_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetFieldRequest, - dict, - ], -) -def test_get_field_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = field.Field( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = field.Field.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_field(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, field.Field) - assert response.name == "name_value" - - def test_get_field_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11994,6 +10300,7 @@ def test_get_field_rest_required_fields(request_type=firestore_admin.GetFieldReq response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_field(request) @@ -12011,87 +10318,6 @@ def test_get_field_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_field_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_field" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_field" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetFieldRequest.pb( - firestore_admin.GetFieldRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = field.Field.to_json(field.Field()) - - request = firestore_admin.GetFieldRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = field.Field() - - client.get_field( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_field_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.GetFieldRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_field(request) - - def test_get_field_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12122,6 +10348,7 @@ def test_get_field_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_field(**mock_args) @@ -12151,165 +10378,28 @@ def test_get_field_rest_flattened_error(transport: str = "rest"): ) -def test_get_field_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - +def test_update_field_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateFieldRequest, - dict, - ], -) -def test_update_field_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # send a request that will satisfy transcoding - request_init = { - "field": { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - } - request_init["field"] = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4", - "index_config": { - "indexes": [ - { - "name": "name_value", - "query_scope": 1, - "api_scope": 1, - "fields": [ - { - "field_path": "field_path_value", - "order": 1, - "array_config": 1, - "vector_config": {"dimension": 966, "flat": {}}, - } - ], - "state": 1, - } - ], - "uses_ancestor_config": True, - "ancestor_field": "ancestor_field_value", - "reverting": True, - }, - "ttl_config": {"state": 1}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Ensure method has been cached + assert client._transport.update_field in client._transport._wrapped_methods - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateFieldRequest.meta.fields["field"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["field"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["field"][field])): - del request_init["field"][field][i][subfield] - else: - del request_init["field"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_field(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_update_field_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_field in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.update_field] = mock_rpc + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_field] = mock_rpc request = {} client.update_field(request) @@ -12389,6 +10479,7 @@ def test_update_field_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_field(request) @@ -12406,93 +10497,6 @@ def test_update_field_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask",)) & set(("field",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_field_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_field" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_update_field" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.UpdateFieldRequest.pb( - firestore_admin.UpdateFieldRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.UpdateFieldRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_field( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_field_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.UpdateFieldRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "field": { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
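# --- note on the get_message_fields helper deleted above ---
# It guarded against version skew between the protos used at generation
# time and at test runtime: list a message's fields, for proto-plus and
# raw protobuf classes alike, so sample-request keys unknown to the
# runtime can be pruned. A self-contained sketch of the detection step,
# using SimpleNamespace stand-ins instead of real descriptors:

from types import SimpleNamespace

def get_message_fields(field):
    message = getattr(field, "message", None)
    if not message:
        return []                                # scalar field: nothing nested
    if hasattr(message, "DESCRIPTOR"):           # raw protobuf message class
        return list(message.DESCRIPTOR.fields)
    return list(message.meta.fields.values())    # proto-plus message class

# Stand-in "proto-plus" message: nested fields live under .meta.fields.
proto_plus_msg = SimpleNamespace(meta=SimpleNamespace(fields={"name": "field-1"}))
assert get_message_fields(SimpleNamespace(message=proto_plus_msg)) == ["field-1"]
assert get_message_fields(SimpleNamespace(message=None)) == []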
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_field(request) - - def test_update_field_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12523,6 +10527,7 @@ def test_update_field_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_field(**mock_args) @@ -12552,54 +10557,6 @@ def test_update_field_rest_flattened_error(transport: str = "rest"): ) -def test_update_field_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListFieldsRequest, - dict, - ], -) -def test_list_fields_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListFieldsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListFieldsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_fields(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListFieldsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_fields_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12710,6 +10667,7 @@ def test_list_fields_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_fields(request) @@ -12736,93 +10694,10 @@ def test_list_fields_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_fields_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( +def test_list_fields_rest_flattened(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_fields" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_fields" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListFieldsRequest.pb( - firestore_admin.ListFieldsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListFieldsResponse.to_json( - firestore_admin.ListFieldsResponse() - ) - - request = firestore_admin.ListFieldsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListFieldsResponse() - - client.list_fields( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_fields_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.ListFieldsRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_fields(request) - - -def test_list_fields_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest", ) # Mock the http request call within the method and fake a response. 
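# --- note on test_list_fields_rest_pager, kept just below ---
# It drives the page iterator: each page carries the raw response and its
# next_page_token, and iteration stops once the token is empty. A minimal
# sketch of that contract with stand-in types (the real pager lives in the
# generated pagers module, which this does not reproduce):

from dataclasses import dataclass
from typing import Callable, Iterator, List

@dataclass
class ListFieldsResponse:
    fields: List[str]
    next_page_token: str = ""

class Pager:
    def __init__(self, fetch: Callable[[str], ListFieldsResponse], first: ListFieldsResponse):
        self._fetch = fetch          # fetch(page_token) -> next response
        self._page = first

    @property
    def pages(self) -> Iterator[ListFieldsResponse]:
        while True:
            yield self._page
            if not self._page.next_page_token:
                return
            self._page = self._fetch(self._page.next_page_token)

responses = {
    "": ListFieldsResponse(["a", "b"], next_page_token="tok-1"),
    "tok-1": ListFieldsResponse(["c"]),
}
pager = Pager(lambda tok: responses[tok], responses[""])
assert [p.next_page_token for p in pager.pages] == ["tok-1", ""]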
@@ -12849,6 +10724,7 @@ def test_list_fields_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_fields(**mock_args) @@ -12943,41 +10819,6 @@ def test_list_fields_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ExportDocumentsRequest, - dict, - ], -) -def test_export_documents_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.export_documents(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - def test_export_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13084,6 +10925,7 @@ def test_export_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_documents(request) @@ -13101,89 +10943,6 @@ def test_export_documents_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_export_documents" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_export_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ExportDocumentsRequest.pb( - firestore_admin.ExportDocumentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.ExportDocumentsRequest() - metadata = [ 
- ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.export_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_export_documents_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.ExportDocumentsRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_documents(request) - - def test_export_documents_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13210,6 +10969,7 @@ def test_export_documents_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_documents(**mock_args) @@ -13239,47 +10999,6 @@ def test_export_documents_rest_flattened_error(transport: str = "rest"): ) -def test_export_documents_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ImportDocumentsRequest, - dict, - ], -) -def test_import_documents_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.import_documents(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13386,6 +11105,7 @@ def test_import_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_documents(request) @@ -13403,102 +11123,19 @@ def test_import_documents_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( +def test_import_documents_rest_flattened(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), + transport="rest", ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_import_documents" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_import_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ImportDocumentsRequest.pb( - firestore_admin.ImportDocumentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - request = firestore_admin.ImportDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.import_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.ImportDocumentsRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.import_documents(request) - - -def test_import_documents_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/databases/sample2"} + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/databases/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -13512,6 +11149,7 @@ def test_import_documents_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_documents(**mock_args) @@ -13541,47 +11179,6 @@ def test_import_documents_rest_flattened_error(transport: str = "rest"): ) -def test_import_documents_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.BulkDeleteDocumentsRequest, - dict, - ], -) -def test_bulk_delete_documents_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.bulk_delete_documents(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - def test_bulk_delete_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13691,6 +11288,7 @@ def test_bulk_delete_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.bulk_delete_documents(request) @@ -13708,89 +11306,6 @@ def test_bulk_delete_documents_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_bulk_delete_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_bulk_delete_documents" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_bulk_delete_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.BulkDeleteDocumentsRequest.pb( - firestore_admin.BulkDeleteDocumentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.BulkDeleteDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.bulk_delete_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_bulk_delete_documents_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.BulkDeleteDocumentsRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
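# --- note on the *_unset_required_fields assertions above ---
# Stated here as an assumption about the generator's convention: the left
# set holds query parameters that carry defaults, the right set holds the
# RPC's required fields, so anything in the intersection may legally be
# left unset. For bulk_delete_documents that intersection is empty:

fields_with_defaults: set = set()       # BulkDeleteDocuments declares none
required_fields = {"name"}
assert (fields_with_defaults & required_fields) == set()

# list_databases, by contrast, declares a defaulted showDeleted parameter,
# and it still never overlaps the required "parent":
assert ({"showDeleted"} & {"parent"}) == set()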
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.bulk_delete_documents(request) - - def test_bulk_delete_documents_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13817,6 +11332,7 @@ def test_bulk_delete_documents_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.bulk_delete_documents(**mock_args) @@ -13846,177 +11362,40 @@ def test_bulk_delete_documents_rest_flattened_error(transport: str = "rest"): ) -def test_bulk_delete_documents_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_create_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateDatabaseRequest, - dict, - ], -) -def test_create_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Ensure method has been cached + assert client._transport.create_database in client._transport._wrapped_methods - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request_init["database"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "location_id": "location_id_value", - "type_": 1, - "concurrency_mode": 1, - "version_retention_period": {"seconds": 751, "nanos": 543}, - "earliest_version_time": {}, - "point_in_time_recovery_enablement": 1, - "app_engine_integration_mode": 1, - "key_prefix": "key_prefix_value", - "delete_protection_state": 1, - "cmek_config": { - "kms_key_name": "kms_key_name_value", - "active_key_version": [ - "active_key_version_value1", - "active_key_version_value2", - ], - }, - "previous_id": "previous_id_value", - "source_info": { - "backup": {"backup": "backup_value"}, - "operation": "operation_value", - }, - "etag": "etag_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateDatabaseRequest.meta.fields["database"] + request = {} + client.create_database(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_database(request) - - # Establish that the response is the type that we expect. 
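# --- note on the wrapper_fn.call_count bookkeeping visible above ---
# It is the verification side of the caching sketched earlier: patch the
# wrapping function, construct the client once, then prove that later
# calls never re-wrap. A self-contained version of that idiom with
# stand-in names (Client, wrap_method are illustrative):

from unittest import mock

def wrap_method(func):
    return func  # stand-in; the real wrapper adds retry/timeout plumbing

class Client:
    def __init__(self):
        self.rpc = wrap_method(self._rpc)  # wrapped exactly once, at init

    def _rpc(self):
        return "ok"

with mock.patch(f"{__name__}.wrap_method", wraps=wrap_method) as wrapper_fn:
    client = Client()
    assert wrapper_fn.call_count > 0       # wrapping happened during construction
    wrapper_fn.reset_mock()
    client.rpc()
    client.rpc()
    assert wrapper_fn.call_count == 0      # cached wrapper reused on every call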
- assert response.operation.name == "operations/spam" - - -def test_create_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_database(request) + client.create_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14096,6 +11475,7 @@ def test_create_database_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_database(request) @@ -14128,89 +11508,6 @@ def test_create_database_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_create_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.CreateDatabaseRequest.pb( - firestore_admin.CreateDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.CreateDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_database_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.CreateDatabaseRequest -): - client = 
FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_database(request) - - def test_create_database_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14239,6 +11536,7 @@ def test_create_database_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_database(**mock_args) @@ -14268,81 +11566,6 @@ def test_create_database_rest_flattened_error(transport: str = "rest"): ) -def test_create_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetDatabaseRequest, - dict, - ], -) -def test_get_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = database.Database( - name="name_value", - uid="uid_value", - location_id="location_id_value", - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix="key_prefix_value", - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - previous_id="previous_id_value", - etag="etag_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = database.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_database(request) - - # Establish that the response is the type that we expect. 
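# --- note on the serialisation dance in test_get_database_rest, deleted above ---
# Every mocked response performs the same steps: proto-plus message ->
# protobuf via .pb() -> MessageToJson -> bytes in response_value._content.
# The same dance with a message type bundled in protobuf itself (Struct),
# so this sketch runs without the Firestore admin protos:

from google.protobuf import json_format, struct_pb2

msg = struct_pb2.Struct()
msg.update({"name": "projects/sample1/databases/sample2"})
content = json_format.MessageToJson(msg).encode("UTF-8")  # response_value._content
parsed = json_format.Parse(content.decode("UTF-8"), struct_pb2.Struct())
assert parsed["name"] == "projects/sample1/databases/sample2"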
- assert isinstance(response, database.Database) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.location_id == "location_id_value" - assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE - assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC - assert ( - response.point_in_time_recovery_enablement - == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED - ) - assert ( - response.app_engine_integration_mode - == database.Database.AppEngineIntegrationMode.ENABLED - ) - assert response.key_prefix == "key_prefix_value" - assert ( - response.delete_protection_state - == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED - ) - assert response.previous_id == "previous_id_value" - assert response.etag == "etag_value" - - def test_get_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14445,6 +11668,7 @@ def test_get_database_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_database(request) @@ -14462,87 +11686,8 @@ def test_get_database_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetDatabaseRequest.pb( - firestore_admin.GetDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = database.Database.to_json(database.Database()) - - request = firestore_admin.GetDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = database.Database() - - client.get_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_database_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.GetDatabaseRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_database(request) - - -def test_get_database_rest_flattened(): - client = FirestoreAdminClient( +def test_get_database_rest_flattened(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -14569,6 +11714,7 @@ def test_get_database_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_database(**mock_args) @@ -14596,52 +11742,6 @@ def test_get_database_rest_flattened_error(transport: str = "rest"): ) -def test_get_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListDatabasesRequest, - dict, - ], -) -def test_list_databases_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListDatabasesResponse( - unreachable=["unreachable_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_databases(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListDatabasesResponse) - assert response.unreachable == ["unreachable_value"] - - def test_list_databases_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14746,6 +11846,7 @@ def test_list_databases_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_databases(request) @@ -14763,87 +11864,6 @@ def test_list_databases_rest_unset_required_fields(): assert set(unset_fields) == (set(("showDeleted",)) & set(("parent",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_databases" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_databases" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListDatabasesRequest.pb( - firestore_admin.ListDatabasesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListDatabasesResponse.to_json( - firestore_admin.ListDatabasesResponse() - ) - - request = firestore_admin.ListDatabasesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListDatabasesResponse() - - client.list_databases( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_databases_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.ListDatabasesRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_databases(request) - - def test_list_databases_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14872,6 +11892,7 @@ def test_list_databases_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_databases(**mock_args) @@ -14899,144 +11920,183 @@ def test_list_databases_rest_flattened_error(transport: str = "rest"): ) -def test_list_databases_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_update_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateDatabaseRequest, - dict, - ], -) -def test_update_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Ensure method has been cached + assert client._transport.update_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + + request = {} + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. 
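    # The caching contract asserted here, as a minimal sketch (assumed
    # structure; the real cache is populated once by
    # _prep_wrapped_messages at client construction):
    #
    #     wrapped = transport._wrapped_methods.get(transport.update_database)
    #     if wrapped is None:  # only at construction, never per call
    #         wrapped = gapic_v1.method.wrap_method(transport.update_database)
    #         transport._wrapped_methods[transport.update_database] = wrapped
    #     wrapped(request)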
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_database_rest_required_fields( + request_type=firestore_admin.UpdateDatabaseRequest, +): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # send a request that will satisfy transcoding - request_init = {"database": {"name": "projects/sample1/databases/sample2"}} - request_init["database"] = { - "name": "projects/sample1/databases/sample2", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "location_id": "location_id_value", - "type_": 1, - "concurrency_mode": 1, - "version_retention_period": {"seconds": 751, "nanos": 543}, - "earliest_version_time": {}, - "point_in_time_recovery_enablement": 1, - "app_engine_integration_mode": 1, - "key_prefix": "key_prefix_value", - "delete_protection_state": 1, - "cmek_config": { - "kms_key_name": "kms_key_name_value", - "active_key_version": [ - "active_key_version_value1", - "active_key_version_value2", - ], - }, - "previous_id": "previous_id_value", - "source_info": { - "backup": {"backup": "backup_value"}, - "operation": "operation_value", - }, - "etag": "etag_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # verify fields with default values are dropped - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateDatabaseRequest.meta.fields["database"] + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # verify required fields with default values are now present - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
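    # Reading the set expression below: unset_fields minus the expected
    # optional parameters must be empty, i.e. nothing unexpected remains
    # unset. Worked example (values illustrative only):
    #
    #     assert not {"update_mask"} - {"update_mask"}          # empty -> passes
    #     # {"etag"} - {"update_mask"} == {"etag"} -> truthy -> would fail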
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # verify required fields with non-default values are left alone - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - subfields_not_in_runtime = [] + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_database_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("database",))) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = 
subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) + +def test_update_database_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = {"database": {"name": "projects/sample1/databases/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + database=gfa_database.Database(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_database(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + client.update_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{database.name=projects/*/databases/*}" % client.transport._host, + args[1], + ) -def test_update_database_rest_use_cached_wrapped_rpc(): +def test_update_database_rest_flattened_error(transport: str = "rest"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database( + firestore_admin.UpdateDatabaseRequest(), + database=gfa_database.Database(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15050,17 +12110,17 @@ def test_update_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods + assert client._transport.delete_database in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc request = {} - client.update_database(request) + client.delete_database(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -15069,19 +12129,20 @@ def test_update_database_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_database(request) + client.delete_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_database_rest_required_fields( - request_type=firestore_admin.UpdateDatabaseRequest, +def test_delete_database_rest_required_fields( + request_type=firestore_admin.DeleteDatabaseRequest, ): transport_class = transports.FirestoreAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15092,19 +12153,23 @@ def test_update_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_database._get_unset_required_fields(jsonified_request) + ).delete_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_database._get_unset_required_fields(jsonified_request) + ).delete_database._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15125,10 +12190,9 @@ def test_update_database_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -15137,110 +12201,28 @@ def test_update_database_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_database(request) + response = client.delete_database(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_database_rest_unset_required_fields(): +def test_delete_database_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("database",))) + unset_fields = transport.delete_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( +def test_delete_database_rest_flattened(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor 
- else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_update_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.UpdateDatabaseRequest.pb( - firestore_admin.UpdateDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.UpdateDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_database_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.UpdateDatabaseRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"database": {"name": "projects/sample1/databases/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_database(request) - - -def test_update_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest", ) # Mock the http request call within the method and fake a response. @@ -15249,12 +12231,11 @@ def test_update_database_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"database": {"name": "projects/sample1/databases/sample2"}} + sample_request = {"name": "projects/sample1/databases/sample2"} # get truthy value for each flattened field mock_args = dict( - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -15264,20 +12245,20 @@ def test_update_database_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_database(**mock_args) + client.delete_database(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
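    # path_template.validate matches the final request URL against the
    # URI template from the http rule; roughly (host and ids illustrative):
    #
    #     from google.api_core import path_template
    #     path_template.validate(
    #         "https://firestore.googleapis.com/v1/{name=projects/*/databases/*}",
    #         "https://firestore.googleapis.com/v1/projects/p1/databases/d1",
    #     )  # -> True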
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{database.name=projects/*/databases/*}" % client.transport._host, - args[1], + "%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1] ) -def test_update_database_rest_flattened_error(transport: str = "rest"): +def test_delete_database_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15286,55 +12267,13 @@ def test_update_database_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_database( - firestore_admin.UpdateDatabaseRequest(), - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_database( + firestore_admin.DeleteDatabaseRequest(), + name="name_value", ) -def test_update_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteDatabaseRequest, - dict, - ], -) -def test_delete_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_database(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_delete_database_rest_use_cached_wrapped_rpc(): +def test_get_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15348,35 +12287,29 @@ def test_delete_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_database in client._transport._wrapped_methods + assert client._transport.get_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc request = {} - client.delete_database(request) + client.get_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_database(request) + client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_database_rest_required_fields( - request_type=firestore_admin.DeleteDatabaseRequest, -): +def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupRequest): transport_class = transports.FirestoreAdminRestTransport request_init = {} @@ -15391,7 +12324,7 @@ def test_delete_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_database._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -15400,9 +12333,7 @@ def test_delete_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag",)) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -15416,7 +12347,7 @@ def test_delete_database_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.Backup() # Mock the http request call within the method and fake a response. 
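    # The faked response below follows the recipe used throughout this
    # module: a bare requests.Response given a status code, a serialized
    # proto body, and a headers mapping (presumably because the generated
    # REST transport now reads response headers). Condensed sketch:
    #
    #     response_value = Response()
    #     response_value.status_code = 200
    #     response_value._content = json_format.MessageToJson(return_value).encode("UTF-8")
    #     response_value.headers = {"header-1": "value-1"}
    #     req.return_value = response_value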
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15428,118 +12359,39 @@ def test_delete_database_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_database(request) + response = client.get_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_database_rest_unset_required_fields(): +def test_get_backup_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_delete_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.DeleteDatabaseRequest.pb( - firestore_admin.DeleteDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.DeleteDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_database_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.DeleteDatabaseRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_database(request) + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_database_rest_flattened(): +def test_get_backup_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15548,10 +12400,10 @@ def test_delete_database_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.Backup() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/databases/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} # get truthy value for each flattened field mock_args = dict( @@ -15562,22 +12414,26 @@ def test_delete_database_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_database(**mock_args) + client.get_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1] + "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, + args[1], ) -def test_delete_database_rest_flattened_error(transport: str = "rest"): +def test_get_backup_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15586,65 +12442,13 @@ def test_delete_database_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_database( - firestore_admin.DeleteDatabaseRequest(), + client.get_backup( + firestore_admin.GetBackupRequest(), name="name_value", ) -def test_delete_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetBackupRequest, - dict, - ], -) -def test_get_backup_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup( - name="name_value", - database="database_value", - database_uid="database_uid_value", - state=backup.Backup.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_backup(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.name == "name_value" - assert response.database == "database_value" - assert response.database_uid == "database_uid_value" - assert response.state == backup.Backup.State.CREATING - - -def test_get_backup_rest_use_cached_wrapped_rpc(): +def test_list_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15658,33 +12462,35 @@ def test_get_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods + assert client._transport.list_backups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc request = {} - client.get_backup(request) + client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup(request) + client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupRequest): +def test_list_backups_rest_required_fields( + request_type=firestore_admin.ListBackupsRequest, +): transport_class = transports.FirestoreAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15695,21 +12501,23 @@ def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15718,7 +12526,7 @@ def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup.Backup() + return_value = firestore_admin.ListBackupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15739,108 +12547,30 @@ def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupR response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) + return_value = firestore_admin.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) + response = client.list_backups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_rest_unset_required_fields(): +def test_list_backups_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_backup" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_backup" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetBackupRequest.pb( - firestore_admin.GetBackupRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = backup.Backup.to_json(backup.Backup()) - - request = firestore_admin.GetBackupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.Backup() - - client.get_backup( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_backup_rest_bad_request( - transport: str 
= "rest", request_type=firestore_admin.GetBackupRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_backup(request) + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter",)) & set(("parent",))) -def test_get_backup_rest_flattened(): +def test_list_backups_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15849,14 +12579,14 @@ def test_get_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.Backup() + return_value = firestore_admin.ListBackupsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -15864,24 +12594,25 @@ def test_get_backup_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) + return_value = firestore_admin.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(**mock_args) + client.list_backups(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/backups" % client.transport._host, args[1], ) -def test_get_backup_rest_flattened_error(transport: str = "rest"): +def test_list_backups_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15890,59 +12621,13 @@ def test_get_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_backup( - firestore_admin.GetBackupRequest(), - name="name_value", - ) - - -def test_get_backup_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListBackupsRequest, - dict, - ], -) -def test_list_backups_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupsResponse( - unreachable=["unreachable_value"], + client.list_backups( + firestore_admin.ListBackupsRequest(), + parent="parent_value", ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_backups(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupsResponse) - assert response.unreachable == ["unreachable_value"] - -def test_list_backups_rest_use_cached_wrapped_rpc(): +def test_delete_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15956,35 +12641,35 @@ def test_list_backups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods + assert client._transport.delete_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc request = {} - client.list_backups(request) + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backups(request) + client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backups_rest_required_fields( - request_type=firestore_admin.ListBackupsRequest, +def test_delete_backup_rest_required_fields( + request_type=firestore_admin.DeleteBackupRequest, ): transport_class = transports.FirestoreAdminRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15995,21 +12680,21 @@ def test_list_backups_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16018,7 +12703,7 @@ def test_list_backups_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupsResponse() + return_value = None # Mock the http request call within the method and fake a response. 
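    # delete_backup maps to google.protobuf.Empty on the wire, so the
    # fake below uses an empty JSON body (json_return_value = "") and the
    # client surfaces None:
    #
    #     response = client.delete_backup(request)
    #     assert response is None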
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16030,119 +12715,36 @@ def test_list_backups_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) + response = client.delete_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backups_rest_unset_required_fields(): +def test_delete_backup_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backups._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_backups" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_backups" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListBackupsRequest.pb( - firestore_admin.ListBackupsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListBackupsResponse.to_json( - firestore_admin.ListBackupsResponse() - ) - - request = firestore_admin.ListBackupsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListBackupsResponse() - - client.list_backups( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_backups_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.ListBackupsRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_backups(request) + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_backups_rest_flattened(): +def test_delete_backup_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16151,39 +12753,38 @@ def test_list_backups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupsResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(**mock_args) + client.delete_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backups" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, args[1], ) -def test_list_backups_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16192,54 +12793,13 @@ def test_list_backups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backups( - firestore_admin.ListBackupsRequest(), - parent="parent_value", + client.delete_backup( + firestore_admin.DeleteBackupRequest(), + name="name_value", ) -def test_list_backups_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteBackupRequest, - dict, - ], -) -def test_delete_backup_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_backup(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_backup_rest_use_cached_wrapped_rpc(): +def test_restore_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16253,35 +12813,43 @@ def test_delete_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods + assert client._transport.restore_database in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.restore_database + ] = mock_rpc request = {} - client.delete_backup(request) + client.restore_database(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_backup(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_rest_required_fields( - request_type=firestore_admin.DeleteBackupRequest, +def test_restore_database_rest_required_fields( + request_type=firestore_admin.RestoreDatabaseRequest, ): transport_class = transports.FirestoreAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["database_id"] = "" + request_init["backup"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16292,21 +12860,27 @@ def test_delete_backup_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).restore_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["databaseId"] = "database_id_value" + jsonified_request["backup"] = "backup_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).restore_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert 
jsonified_request["parent"] == "parent_value" + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == "database_id_value" + assert "backup" in jsonified_request + assert jsonified_request["backup"] == "backup_value" client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16315,7 +12889,7 @@ def test_delete_backup_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16327,108 +12901,180 @@ def test_delete_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) + response = client.restore_database(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_rest_unset_required_fields(): +def test_restore_database_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.restore_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "databaseId", + "backup", + ) + ) + ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_backup" - ) as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteBackupRequest.pb( - firestore_admin.DeleteBackupRequest() +def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - request = 
firestore_admin.DeleteBackupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - client.delete_backup( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Ensure method has been cached + assert ( + client._transport.create_backup_schedule + in client._transport._wrapped_methods ) - pre.assert_called_once() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_schedule + ] = mock_rpc + request = {} + client.create_backup_schedule(request) -def test_delete_backup_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.DeleteBackupRequest + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_schedule_rest_required_fields( + request_type=firestore_admin.CreateBackupScheduleRequest, ): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_backup(request) + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -def test_delete_backup_rest_flattened(): + response = client.create_backup_schedule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_schedule_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "backupSchedule", + ) + ) + ) + + +def test_create_backup_schedule_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16437,37 +13083,42 @@ def test_delete_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = schedule.BackupSchedule() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + sample_request = {"parent": "projects/sample1/databases/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + backup_schedule=schedule.BackupSchedule(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(**mock_args) + client.create_backup_schedule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, + "%s/v1/{parent=projects/*/databases/*}/backupSchedules" + % client.transport._host, args[1], ) -def test_delete_backup_rest_flattened_error(transport: str = "rest"): +def test_create_backup_schedule_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16476,54 +13127,14 @@ def test_delete_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup( - firestore_admin.DeleteBackupRequest(), - name="name_value", + client.create_backup_schedule( + firestore_admin.CreateBackupScheduleRequest(), + parent="parent_value", + backup_schedule=schedule.BackupSchedule(name="name_value"), ) -def test_delete_backup_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.RestoreDatabaseRequest, - dict, - ], -) -def test_restore_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.restore_database(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_restore_database_rest_use_cached_wrapped_rpc(): +def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16537,7 +13148,9 @@ def test_restore_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_database in client._transport._wrapped_methods + assert ( + client._transport.get_backup_schedule in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16545,35 +13158,29 @@ def test_restore_database_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.restore_database + client._transport.get_backup_schedule ] = mock_rpc request = {} - client.restore_database(request) + client.get_backup_schedule(request) # Establish that the underlying gRPC stub method was called. 
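# The use_cached_wrapped_rpc tests, including the one in progress here, all
# lean on the same transport property: retry/timeout wrappers are built via
# wrap_method() once, when the transport is constructed, and then reused
# from a dict keyed by the bound method. A minimal sketch of that caching
# pattern (hypothetical names, not this library's internals):
#
#     class SketchTransport:
#         def __init__(self):
#             # wrap_method runs once per RPC here, never once per call
#             self._wrapped_methods = {
#                 self.get_thing: wrap_method(self.get_thing),
#             }
#
#         def invoke(self, rpc, request):
#             return self._wrapped_methods[rpc](request)
#
# Swapping mock_rpc into _wrapped_methods is therefore enough to observe
# both calls below without wrap_method ever running again.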
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) + client.get_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_restore_database_rest_required_fields( - request_type=firestore_admin.RestoreDatabaseRequest, +def test_get_backup_schedule_rest_required_fields( + request_type=firestore_admin.GetBackupScheduleRequest, ): transport_class = transports.FirestoreAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request_init["backup"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16584,27 +13191,21 @@ def test_restore_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_database._get_unset_required_fields(jsonified_request) + ).get_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["databaseId"] = "database_id_value" - jsonified_request["backup"] = "backup_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_database._get_unset_required_fields(jsonified_request) + ).get_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == "database_id_value" - assert "backup" in jsonified_request - assert jsonified_request["backup"] == "backup_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16613,7 +13214,7 @@ def test_restore_database_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = schedule.BackupSchedule() # Mock the http request call within the method and fake a response. 
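# The required-fields tests above all share one probe-and-fill loop:
# serialize a half-empty request to JSON (proto3 drops fields left at their
# default values), ask the transport which required fields are still unset,
# fill those in, and confirm explicitly set values are left alone. A
# condensed sketch using the names from the surrounding test:
#
#     pb_request = request_type.pb(request_type(name=""))
#     jsonified_request = json.loads(
#         json_format.MessageToJson(pb_request, use_integers_for_enums=False)
#     )                                  # "name" is absent: "" is a default
#     unset = rpc._get_unset_required_fields(jsonified_request)
#     jsonified_request.update(unset)    # required defaults reappear
#     jsonified_request["name"] = "name_value"  # non-defaults survive as-is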
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16625,233 +13226,59 @@ def test_restore_database_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restore_database(request) + response = client.get_backup_schedule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_restore_database_rest_unset_required_fields(): +def test_get_backup_schedule_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.restore_database._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "databaseId", - "backup", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_restore_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_restore_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.RestoreDatabaseRequest.pb( - firestore_admin.RestoreDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = firestore_admin.RestoreDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.restore_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_restore_database_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.RestoreDatabaseRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.restore_database(request) - - -def test_restore_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateBackupScheduleRequest, - dict, - ], -) -def test_create_backup_schedule_rest(request_type): +def test_get_backup_schedule_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request_init["backup_schedule"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "retention": {"seconds": 751, "nanos": 543}, - "daily_recurrence": {}, - "weekly_recurrence": {"day": 1}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateBackupScheduleRequest.meta.fields[ - "backup_schedule" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule( + return_value = schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() @@ -16859,17 +13286,39 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_backup_schedule(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" + client.get_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/databases/*/backupSchedules/*}" + % client.transport._host, + args[1], + ) -def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): +def test_get_backup_schedule_rest_flattened_error(transport: str = "rest"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_schedule( + firestore_admin.GetBackupScheduleRequest(), + name="name_value", + ) + + +def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16884,7 +13333,7 @@ def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_backup_schedule + client._transport.list_backup_schedules in client._transport._wrapped_methods ) @@ -16894,24 +13343,24 @@ def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_backup_schedule + client._transport.list_backup_schedules ] = mock_rpc request = {} - client.create_backup_schedule(request) + client.list_backup_schedules(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_backup_schedule(request) + client.list_backup_schedules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_schedule_rest_required_fields( - request_type=firestore_admin.CreateBackupScheduleRequest, +def test_list_backup_schedules_rest_required_fields( + request_type=firestore_admin.ListBackupSchedulesRequest, ): transport_class = transports.FirestoreAdminRestTransport @@ -16927,7 +13376,7 @@ def test_create_backup_schedule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_schedule._get_unset_required_fields(jsonified_request) + ).list_backup_schedules._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16936,7 +13385,7 @@ def test_create_backup_schedule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_schedule._get_unset_required_fields(jsonified_request) + ).list_backup_schedules._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16950,7 +13399,7 @@ def test_create_backup_schedule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() + return_value = firestore_admin.ListBackupSchedulesResponse() # Mock the http request call within the method and fake a response. 
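# Faking a REST response, as above, hinges on one detail: proto-plus
# wrappers such as firestore_admin.ListBackupSchedulesResponse cannot be
# handed to json_format directly, so the tests first unwrap them with the
# Message.pb() classmethod. A minimal round-trip sketch:
#
#     wrapped = firestore_admin.ListBackupSchedulesResponse()
#     raw_pb = firestore_admin.ListBackupSchedulesResponse.pb(wrapped)
#     payload = json_format.MessageToJson(raw_pb)        # JSON string
#     response_value._content = payload.encode("UTF-8")  # bytes on the wire
#
# Methods returning Empty (delete_backup, delete_backup_schedule) skip the
# round trip and use json_return_value = "" instead.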
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16962,128 +13411,39 @@ def test_create_backup_schedule_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) + return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_schedule(request) + response = client.list_backup_schedules(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_backup_schedule_rest_unset_required_fields(): +def test_list_backup_schedules_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "backupSchedule", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_backup_schedule" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_create_backup_schedule" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.CreateBackupScheduleRequest.pb( - firestore_admin.CreateBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schedule.BackupSchedule.to_json( - schedule.BackupSchedule() - ) - - request = firestore_admin.CreateBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - - client.create_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_backup_schedule_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.CreateBackupScheduleRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_backup_schedule(request) + unset_fields = transport.list_backup_schedules._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_create_backup_schedule_rest_flattened(): +def test_list_backup_schedules_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17092,7 +13452,7 @@ def test_create_backup_schedule_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() + return_value = firestore_admin.ListBackupSchedulesResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/databases/sample2"} @@ -17100,7 +13460,6 @@ def test_create_backup_schedule_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), ) mock_args.update(sample_request) @@ -17108,12 +13467,13 @@ def test_create_backup_schedule_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) + return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_schedule(**mock_args) + client.list_backup_schedules(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -17126,7 +13486,7 @@ def test_create_backup_schedule_rest_flattened(): ) -def test_create_backup_schedule_rest_flattened_error(transport: str = "rest"): +def test_list_backup_schedules_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17135,62 +13495,13 @@ def test_create_backup_schedule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_backup_schedule( - firestore_admin.CreateBackupScheduleRequest(), + client.list_backup_schedules( + firestore_admin.ListBackupSchedulesRequest(), parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), - ) - - -def test_create_backup_schedule_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetBackupScheduleRequest, - dict, - ], -) -def test_get_backup_schedule_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule( - name="name_value", ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_backup_schedule(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - -def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): +def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17205,7 +13516,8 @@ def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_backup_schedule in client._transport._wrapped_methods + client._transport.update_backup_schedule + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17214,29 +13526,28 @@ def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_backup_schedule + client._transport.update_backup_schedule ] = mock_rpc request = {} - client.get_backup_schedule(request) + client.update_backup_schedule(request) # Establish that the underlying gRPC stub method was called. 
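# path_template.validate(), used by every *_flattened test above, checks a
# concrete URL against a URI template by expanding each * into a
# single-segment wildcard. A short standalone illustration (sample host and
# ids only):
#
#     from google.api_core import path_template
#
#     tmpl = "https://h/v1/{parent=projects/*/databases/*}/backupSchedules"
#     assert path_template.validate(
#         tmpl, "https://h/v1/projects/p1/databases/d1/backupSchedules"
#     )
#     assert not path_template.validate(
#         tmpl, "https://h/v1/projects/p1/backupSchedules"
#     )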
assert mock_rpc.call_count == 1 - client.get_backup_schedule(request) + client.update_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_schedule_rest_required_fields( - request_type=firestore_admin.GetBackupScheduleRequest, +def test_update_backup_schedule_rest_required_fields( + request_type=firestore_admin.UpdateBackupScheduleRequest, ): transport_class = transports.FirestoreAdminRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17247,21 +13558,19 @@ def test_get_backup_schedule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_schedule._get_unset_required_fields(jsonified_request) + ).update_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_schedule._get_unset_required_fields(jsonified_request) + ).update_backup_schedule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17282,9 +13591,10 @@ def test_get_backup_schedule_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -17296,151 +13606,73 @@ def test_get_backup_schedule_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_schedule(request) + response = client.update_backup_schedule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_schedule_rest_unset_required_fields(): +def test_update_backup_schedule_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("backupSchedule",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( +def test_update_backup_schedule_rest_flattened(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), + transport="rest", ) - client = 
FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_backup_schedule" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_backup_schedule" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetBackupScheduleRequest.pb( - firestore_admin.GetBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schedule.BackupSchedule.to_json( - schedule.BackupSchedule() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule() - request = firestore_admin.GetBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_schedule": { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } + } - client.get_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # get truthy value for each flattened field + mock_args = dict( + backup_schedule=schedule.BackupSchedule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) - pre.assert_called_once() - post.assert_called_once() - + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -def test_get_backup_schedule_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.GetBackupScheduleRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_backup_schedule(request) - - -def test_get_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_backup_schedule(**mock_args) + client.update_backup_schedule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/backupSchedules/*}" + "%s/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1], ) -def test_get_backup_schedule_rest_flattened_error(transport: str = "rest"): +def test_update_backup_schedule_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17449,56 +13681,14 @@ def test_get_backup_schedule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup_schedule( - firestore_admin.GetBackupScheduleRequest(), - name="name_value", + client.update_backup_schedule( + firestore_admin.UpdateBackupScheduleRequest(), + backup_schedule=schedule.BackupSchedule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_backup_schedule_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListBackupSchedulesRequest, - dict, - ], -) -def test_list_backup_schedules_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupSchedulesResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_backup_schedules(request) - - # Establish that the response is the type that we expect. 
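# The update_backup_schedule tests above follow the usual field-mask update
# shape: the resource travels in the "patch" body, update_mask names the
# fields to change, and the URL binds on {backup_schedule.name=...}. A small
# usage sketch with sample identifiers only ("retention" is one of the
# BackupSchedule fields exercised elsewhere in this file):
#
#     client.update_backup_schedule(
#         backup_schedule=schedule.BackupSchedule(
#             name="projects/p1/databases/d1/backupSchedules/s1",
#         ),
#         update_mask=field_mask_pb2.FieldMask(paths=["retention"]),
#     )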
- assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) - - -def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): +def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17513,7 +13703,7 @@ def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_backup_schedules + client._transport.delete_backup_schedule in client._transport._wrapped_methods ) @@ -17523,29 +13713,29 @@ def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_schedules + client._transport.delete_backup_schedule ] = mock_rpc request = {} - client.list_backup_schedules(request) + client.delete_backup_schedule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_backup_schedules(request) + client.delete_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backup_schedules_rest_required_fields( - request_type=firestore_admin.ListBackupSchedulesRequest, +def test_delete_backup_schedule_rest_required_fields( + request_type=firestore_admin.DeleteBackupScheduleRequest, ): transport_class = transports.FirestoreAdminRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17556,21 +13746,21 @@ def test_list_backup_schedules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_schedules._get_unset_required_fields(jsonified_request) + ).delete_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_schedules._get_unset_required_fields(jsonified_request) + ).delete_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17579,7 +13769,7 @@ def test_list_backup_schedules_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupSchedulesResponse() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17591,119 +13781,36 @@ def test_list_backup_schedules_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_schedules(request) + response = client.delete_backup_schedule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backup_schedules_rest_unset_required_fields(): +def test_delete_backup_schedule_rest_unset_required_fields(): transport = transports.FirestoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backup_schedules._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_schedules_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_backup_schedules" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_backup_schedules" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListBackupSchedulesRequest.pb( - firestore_admin.ListBackupSchedulesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListBackupSchedulesResponse.to_json( - firestore_admin.ListBackupSchedulesResponse() - ) - - request = firestore_admin.ListBackupSchedulesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListBackupSchedulesResponse() - - client.list_backup_schedules( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_backup_schedules_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.ListBackupSchedulesRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # 
Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_backup_schedules(request) + unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_backup_schedules_rest_flattened(): +def test_delete_backup_schedule_rest_flattened(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17712,40 +13819,41 @@ def test_list_backup_schedules_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupSchedulesResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/databases/sample2"} + sample_request = { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_schedules(**mock_args) + client.delete_backup_schedule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*}/backupSchedules" + "%s/v1/{name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1], ) -def test_list_backup_schedules_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_schedule_rest_flattened_error(transport: str = "rest"): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17754,815 +13862,5509 @@ def test_list_backup_schedules_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_schedules( - firestore_admin.ListBackupSchedulesRequest(), - parent="parent_value", + client.delete_backup_schedule( + firestore_admin.DeleteBackupScheduleRequest(), + name="name_value", ) -def test_list_backup_schedules_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FirestoreAdminClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FirestoreAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreAdminGrpcTransport, + transports.FirestoreAdminGrpcAsyncIOTransport, + transports.FirestoreAdminRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = FirestoreAdminClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_index_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_indexes_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value = firestore_admin.ListIndexesResponse() + client.list_indexes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListIndexesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_index_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value = index.Index() + client.get_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_index_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value = None + client.delete_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_field_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_field), "__call__") as call: + call.return_value = field.Field() + client.get_field(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetFieldRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_field_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.update_field), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_field(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateFieldRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_fields_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_fields), "__call__") as call: + call.return_value = firestore_admin.ListFieldsResponse() + client.list_fields(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListFieldsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_documents_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_documents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ExportDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_documents_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ImportDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_delete_documents_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_delete_documents), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.bulk_delete_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.BulkDeleteDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
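# Every *_empty_call_grpc test in this file repeats one template: patch the
# bound transport method, call the client with request=None, and check that
# the stub received a default-constructed request message. A condensed
# sketch of that template (generic_method/GenericRequest are hypothetical
# placeholders for any of the RPCs exercised here):
def test_generic_empty_call_grpc():
    client = FirestoreAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.generic_method), "__call__"
    ) as call:
        call.return_value = None
        client.generic_method(request=None)
        # The stub must have been invoked with a default-constructed request.
        _, args, _ = call.mock_calls[0]
        assert args[0] == firestore_admin.GenericRequest()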
+def test_create_database_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + call.return_value = database.Database() + client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_databases_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = firestore_admin.ListDatabasesResponse() + client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_database_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_database(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backup.Backup() + client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backups_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = firestore_admin.ListBackupsResponse() + client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = None + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_database_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.restore_database), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_schedule_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + call.return_value = schedule.BackupSchedule() + client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_schedule_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + call.return_value = schedule.BackupSchedule() + client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_schedules_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + call.return_value = firestore_admin.ListBackupSchedulesResponse() + client.list_backup_schedules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListBackupSchedulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_schedule_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + call.return_value = schedule.BackupSchedule() + client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_schedule_empty_call_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), "__call__" + ) as call: + call.return_value = None + client.delete_backup_schedule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteBackupScheduleRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = FirestoreAdminAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_index_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_indexes_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_indexes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListIndexesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_index_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + index.Index( + name="name_value", + query_scope=index.Index.QueryScope.COLLECTION, + api_scope=index.Index.ApiScope.DATASTORE_MODE_API, + state=index.Index.State.CREATING, + ) + ) + await client.get_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
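# The grpc_asyncio variants below wrap every stubbed return value in
# grpc_helpers_async.FakeUnaryUnaryCall because grpc.aio stubs return
# awaitable call objects, not bare responses. A minimal sketch of the
# property these tests rely on (the test name is hypothetical;
# FakeUnaryUnaryCall is the helper used throughout this file):
@pytest.mark.asyncio
async def test_fake_unary_unary_call_is_awaitable():
    call = grpc_helpers_async.FakeUnaryUnaryCall(
        firestore_admin.ListIndexesResponse()
    )
    # Awaiting the fake call yields the wrapped response, mirroring a
    # real grpc.aio unary-unary invocation.
    response = await call
    assert isinstance(response, firestore_admin.ListIndexesResponse)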
+@pytest.mark.asyncio +async def test_delete_index_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_field_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_field), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + field.Field( + name="name_value", + ) + ) + await client.get_field(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetFieldRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_field_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_field), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_field(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateFieldRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_fields_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_fields), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore_admin.ListFieldsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_fields(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListFieldsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_export_documents_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.export_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ExportDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_documents_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.import_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ImportDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_bulk_delete_documents_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_delete_documents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.bulk_delete_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.BulkDeleteDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_database_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_database(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_database_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + database.Database( + name="name_value", + uid="uid_value", + location_id="location_id_value", + type_=database.Database.DatabaseType.FIRESTORE_NATIVE, + concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, + point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, + app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, + key_prefix="key_prefix_value", + delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, + previous_id="previous_id_value", + etag="etag_value", + ) + ) + await client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_databases_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore_admin.ListDatabasesResponse( + unreachable=["unreachable_value"], + ) + ) + await client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_database_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
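# The `args[0] == request_msg` assertions above rely on proto-plus
# structural equality: two messages compare equal only when every field
# matches, so a call made with request=None must have produced a
# default-constructed request. A small sketch of that property (the test
# name is hypothetical):
def test_default_request_equality():
    # Default-constructed requests with no fields set compare equal...
    assert firestore_admin.GetDatabaseRequest() == firestore_admin.GetDatabaseRequest()
    # ...and diverge as soon as any field is populated.
    assert firestore_admin.GetDatabaseRequest() != firestore_admin.GetDatabaseRequest(
        name="name_value"
    )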
+@pytest.mark.asyncio +async def test_delete_database_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup.Backup( + name="name_value", + database="database_value", + database_uid="database_uid_value", + state=backup.Backup.State.CREATING, + ) + ) + await client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backups_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore_admin.ListBackupsResponse( + unreachable=["unreachable_value"], + ) + ) + await client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_restore_database_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.restore_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_schedule_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schedule.BackupSchedule( + name="name_value", + ) + ) + await client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_schedule_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schedule.BackupSchedule( + name="name_value", + ) + ) + await client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_schedules_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore_admin.ListBackupSchedulesResponse() + ) + await client.list_backup_schedules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListBackupSchedulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_schedule_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schedule.BackupSchedule( + name="name_value", + ) + ) + await client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_schedule_empty_call_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteBackupScheduleRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = FirestoreAdminClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_index_rest_bad_request(request_type=firestore_admin.CreateIndexRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
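    # The REST bad-request tests in this file all share the stub below:
    # requests.Session.request is patched to return a response whose
    # status_code is 400, which google.api_core maps to
    # core_exceptions.BadRequest before any payload is parsed. In sketch
    # form (same names as the surrounding test):
    #
    #   response_value = mock.Mock()
    #   response_value.status_code = 400
    #   req.return_value = response_value
    #   with pytest.raises(core_exceptions.BadRequest):
    #       client.create_index(request)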
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_index(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.CreateIndexRequest, + dict, + ], +) +def test_create_index_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" + } + request_init["index"] = { + "name": "name_value", + "query_scope": 1, + "api_scope": 1, + "fields": [ + { + "field_path": "field_path_value", + "order": 1, + "array_config": 1, + "vector_config": {"dimension": 966, "flat": {}}, + } + ], + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.CreateIndexRequest.meta.fields["index"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["index"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["index"][field])): + del request_init["index"][field][i][subfield] + else: + del request_init["index"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_index(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_index_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_create_index" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_create_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.CreateIndexRequest.pb( + firestore_admin.CreateIndexRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firestore_admin.CreateIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_indexes_rest_bad_request(request_type=firestore_admin.ListIndexesRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
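    # The interceptor test above follows a fixed recipe: path_template.transcode
    # is patched so no real HTTP routing is needed, and the pre_/post_ hooks on
    # FirestoreAdminRestInterceptor are asserted to fire exactly once around the
    # transport call. A custom interceptor only overrides the hooks it needs; a
    # hypothetical sketch:
    #
    #   class LoggingInterceptor(transports.FirestoreAdminRestInterceptor):
    #       def pre_list_indexes(self, request, metadata):
    #           # Inspect or rewrite the request before it is sent.
    #           return request, metadata
    #
    #       def post_list_indexes(self, response):
    #           # Inspect the parsed response before it is returned.
    #           return response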
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_indexes(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.ListIndexesRequest, + dict, + ], +) +def test_list_indexes_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListIndexesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_indexes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListIndexesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_indexes_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_indexes" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_list_indexes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListIndexesRequest.pb( + firestore_admin.ListIndexesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore_admin.ListIndexesResponse.to_json( + firestore_admin.ListIndexesResponse() + ) + req.return_value.content = return_value + + request = firestore_admin.ListIndexesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListIndexesResponse() + + client.list_indexes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_index_rest_bad_request(request_type=firestore_admin.GetIndexRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_index(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.GetIndexRequest, + dict, + ], +) +def test_get_index_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = index.Index( + name="name_value", + query_scope=index.Index.QueryScope.COLLECTION, + api_scope=index.Index.ApiScope.DATASTORE_MODE_API, + state=index.Index.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = index.Index.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_index(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) + assert response.name == "name_value" + assert response.query_scope == index.Index.QueryScope.COLLECTION + assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API + assert response.state == index.Index.State.CREATING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_index_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_index" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_get_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetIndexRequest.pb( + firestore_admin.GetIndexRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = index.Index.to_json(index.Index()) + req.return_value.content = return_value + + request = firestore_admin.GetIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = index.Index() + + client.get_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_index_rest_bad_request(request_type=firestore_admin.DeleteIndexRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
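+    # Patching requests.Session.request at the class level intercepts the REST
+    # transport's underlying HTTP call; api_core maps the mocked 400 response to
+    # core_exceptions.BadRequest, which pytest.raises then expects.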
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_index(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.DeleteIndexRequest, + dict, + ], +) +def test_delete_index_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_index(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_index_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_delete_index" + ) as pre: + pre.assert_not_called() + pb_message = firestore_admin.DeleteIndexRequest.pb( + firestore_admin.DeleteIndexRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = firestore_admin.DeleteIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_field_rest_bad_request(request_type=firestore_admin.GetFieldRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_field(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.GetFieldRequest, + dict, + ], +) +def test_get_field_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = field.Field( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = field.Field.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_field(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, field.Field) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_field_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_field" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_get_field" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetFieldRequest.pb( + firestore_admin.GetFieldRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = field.Field.to_json(field.Field()) + req.return_value.content = return_value + + request = firestore_admin.GetFieldRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = field.Field() + + client.get_field( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_field_rest_bad_request(request_type=firestore_admin.UpdateFieldRequest): + client = FirestoreAdminClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "field": { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_field(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.UpdateFieldRequest, + dict, + ], +) +def test_update_field_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "field": { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" + } + } + request_init["field"] = { + "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4", + "index_config": { + "indexes": [ + { + "name": "name_value", + "query_scope": 1, + "api_scope": 1, + "fields": [ + { + "field_path": "field_path_value", + "order": 1, + "array_config": 1, + "vector_config": {"dimension": 966, "flat": {}}, + } + ], + "state": 1, + } + ], + "uses_ancestor_config": True, + "ancestor_field": "ancestor_field_value", + "reverting": True, + }, + "ttl_config": {"state": 1}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.UpdateFieldRequest.meta.fields["field"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["field"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["field"][field])): + del request_init["field"][field][i][subfield] + else: + del request_init["field"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_field(request) + + # Establish that the response is the type that we expect. 
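+    # The REST LRO surface wraps the mocked operations_pb2.Operation in a
+    # google.api_core.operation.Operation future; assert on the wrapped name.
+    assert response.operation.name == "operations/spam"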
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_field_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_update_field" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_update_field" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.UpdateFieldRequest.pb( + firestore_admin.UpdateFieldRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firestore_admin.UpdateFieldRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_field( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_fields_rest_bad_request(request_type=firestore_admin.ListFieldsRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_fields(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.ListFieldsRequest, + dict, + ], +) +def test_list_fields_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
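+        # list_fields wraps this response in a ListFieldsPager, so the
+        # next_page_token set here should surface on the returned pager.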
+ return_value = firestore_admin.ListFieldsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListFieldsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_fields(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFieldsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_fields_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_fields" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_list_fields" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListFieldsRequest.pb( + firestore_admin.ListFieldsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore_admin.ListFieldsResponse.to_json( + firestore_admin.ListFieldsResponse() + ) + req.return_value.content = return_value + + request = firestore_admin.ListFieldsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListFieldsResponse() + + client.list_fields( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_documents_rest_bad_request( + request_type=firestore_admin.ExportDocumentsRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ""
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.export_documents(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        firestore_admin.ExportDocumentsRequest,
+        dict,
+    ],
+)
+def test_export_documents_rest_call_success(request_type):
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/databases/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.export_documents(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == "operations/spam"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_export_documents_rest_interceptors(null_interceptor):
+    transport = transports.FirestoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.FirestoreAdminRestInterceptor(),
+    )
+    client = FirestoreAdminClient(transport=transport)
+
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "post_export_documents"
+    ) as post, mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "pre_export_documents"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = firestore_admin.ExportDocumentsRequest.pb(
+            firestore_admin.ExportDocumentsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = firestore_admin.ExportDocumentsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.export_documents(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_import_documents_rest_bad_request(
+    request_type=firestore_admin.ImportDocumentsRequest,
+):
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/databases/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ""
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.import_documents(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        firestore_admin.ImportDocumentsRequest,
+        dict,
+    ],
+)
+def test_import_documents_rest_call_success(request_type):
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/databases/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.import_documents(request)
+
+    # Establish that the response is the type that we expect.
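+    assert response.operation.name == "operations/spam"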
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_documents_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_import_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ImportDocumentsRequest.pb( + firestore_admin.ImportDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firestore_admin.ImportDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_bulk_delete_documents_rest_bad_request( + request_type=firestore_admin.BulkDeleteDocumentsRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.bulk_delete_documents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.BulkDeleteDocumentsRequest, + dict, + ], +) +def test_bulk_delete_documents_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.bulk_delete_documents(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == "operations/spam"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_bulk_delete_documents_rest_interceptors(null_interceptor):
+    transport = transports.FirestoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.FirestoreAdminRestInterceptor(),
+    )
+    client = FirestoreAdminClient(transport=transport)
+
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "post_bulk_delete_documents"
+    ) as post, mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "pre_bulk_delete_documents"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = firestore_admin.BulkDeleteDocumentsRequest.pb(
+            firestore_admin.BulkDeleteDocumentsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = firestore_admin.BulkDeleteDocumentsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.bulk_delete_documents(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_database_rest_bad_request(
+    request_type=firestore_admin.CreateDatabaseRequest,
+):
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_database(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.CreateDatabaseRequest, + dict, + ], +) +def test_create_database_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["database"] = { + "name": "name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "location_id": "location_id_value", + "type_": 1, + "concurrency_mode": 1, + "version_retention_period": {"seconds": 751, "nanos": 543}, + "earliest_version_time": {}, + "point_in_time_recovery_enablement": 1, + "app_engine_integration_mode": 1, + "key_prefix": "key_prefix_value", + "delete_protection_state": 1, + "cmek_config": { + "kms_key_name": "kms_key_name_value", + "active_key_version": [ + "active_key_version_value1", + "active_key_version_value2", + ], + }, + "previous_id": "previous_id_value", + "source_info": { + "backup": {"backup": "backup_value"}, + "operation": "operation_value", + }, + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.CreateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_database(request) + + # Establish that the response is the type that we expect. 
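+    assert response.operation.name == "operations/spam"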
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_create_database" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_create_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.CreateDatabaseRequest.pb( + firestore_admin.CreateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firestore_admin.CreateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_database_rest_bad_request(request_type=firestore_admin.GetDatabaseRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_database(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.GetDatabaseRequest, + dict, + ], +) +def test_get_database_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
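+        # Populate a representative set of Database fields, enums included, to
+        # verify they survive the JSON round trip through the REST layer.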
+ return_value = database.Database( + name="name_value", + uid="uid_value", + location_id="location_id_value", + type_=database.Database.DatabaseType.FIRESTORE_NATIVE, + concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, + point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, + app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, + key_prefix="key_prefix_value", + delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, + previous_id="previous_id_value", + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = database.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, database.Database) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.location_id == "location_id_value" + assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE + assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC + assert ( + response.point_in_time_recovery_enablement + == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED + ) + assert ( + response.app_engine_integration_mode + == database.Database.AppEngineIntegrationMode.ENABLED + ) + assert response.key_prefix == "key_prefix_value" + assert ( + response.delete_protection_state + == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED + ) + assert response.previous_id == "previous_id_value" + assert response.etag == "etag_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_database" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_get_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetDatabaseRequest.pb( + firestore_admin.GetDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = database.Database.to_json(database.Database()) + req.return_value.content = return_value + + request = firestore_admin.GetDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = database.Database() + + client.get_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", 
"squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_databases_rest_bad_request( + request_type=firestore_admin.ListDatabasesRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_databases(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.ListDatabasesRequest, + dict, + ], +) +def test_list_databases_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListDatabasesResponse( + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_databases(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore_admin.ListDatabasesResponse) + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_databases_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_databases" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_list_databases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListDatabasesRequest.pb( + firestore_admin.ListDatabasesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore_admin.ListDatabasesResponse.to_json( + firestore_admin.ListDatabasesResponse() + ) + req.return_value.content = return_value + + request = firestore_admin.ListDatabasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListDatabasesResponse() + + client.list_databases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_database_rest_bad_request( + request_type=firestore_admin.UpdateDatabaseRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"database": {"name": "projects/sample1/databases/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_database(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.UpdateDatabaseRequest, + dict, + ], +) +def test_update_database_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"database": {"name": "projects/sample1/databases/sample2"}} + request_init["database"] = { + "name": "projects/sample1/databases/sample2", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "location_id": "location_id_value", + "type_": 1, + "concurrency_mode": 1, + "version_retention_period": {"seconds": 751, "nanos": 543}, + "earliest_version_time": {}, + "point_in_time_recovery_enablement": 1, + "app_engine_integration_mode": 1, + "key_prefix": "key_prefix_value", + "delete_protection_state": 1, + "cmek_config": { + "kms_key_name": "kms_key_name_value", + "active_key_version": [ + "active_key_version_value1", + "active_key_version_value2", + ], + }, + "previous_id": "previous_id_value", + "source_info": { + "backup": {"backup": "backup_value"}, + "operation": "operation_value", + }, + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.UpdateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_database(request) + + # Establish that the response is the type that we expect. 
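+    assert response.operation.name == "operations/spam"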
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_update_database" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_update_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.UpdateDatabaseRequest.pb( + firestore_admin.UpdateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firestore_admin.UpdateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_database_rest_bad_request( + request_type=firestore_admin.DeleteDatabaseRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_database(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.DeleteDatabaseRequest, + dict, + ], +) +def test_delete_database_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_database(request)
+
+        # Establish that the response is the type that we expect.
+        assert response.operation.name == "operations/spam"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_database_rest_interceptors(null_interceptor):
+    transport = transports.FirestoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.FirestoreAdminRestInterceptor(),
+    )
+    client = FirestoreAdminClient(transport=transport)
+
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "post_delete_database"
+    ) as post, mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "pre_delete_database"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = firestore_admin.DeleteDatabaseRequest.pb(
+            firestore_admin.DeleteDatabaseRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = firestore_admin.DeleteDatabaseRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.delete_database(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_backup_rest_bad_request(request_type=firestore_admin.GetBackupRequest):
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
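+    # The 400 status set on the mocked Session.request is surfaced by the
+    # REST transport as core_exceptions.BadRequest, which pytest.raises
+    # captures here.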
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup( + name="name_value", + database="database_value", + database_uid="database_uid_value", + state=backup.Backup.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
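+        # The body was serialized from the populated backup.Backup above, so
+        # each populated field should survive the REST round trip.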
+ assert isinstance(response, backup.Backup) + assert response.name == "name_value" + assert response.database == "database_value" + assert response.database_uid == "database_uid_value" + assert response.state == backup.Backup.State.CREATING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetBackupRequest.pb( + firestore_admin.GetBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.Backup.to_json(backup.Backup()) + req.return_value.content = return_value + + request = firestore_admin.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request(request_type=firestore_admin.ListBackupsRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backups(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
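+        # Populate `unreachable` so the test also covers responses in which
+        # some backup locations could not be reached.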
+ return_value = firestore_admin.ListBackupsResponse( + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore_admin.ListBackupsResponse) + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListBackupsRequest.pb( + firestore_admin.ListBackupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore_admin.ListBackupsResponse.to_json( + firestore_admin.ListBackupsResponse() + ) + req.return_value.content = return_value + + request = firestore_admin.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_rest_bad_request( + request_type=firestore_admin.DeleteBackupRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_backup(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_delete_backup" + ) as pre: + pre.assert_not_called() + pb_message = firestore_admin.DeleteBackupRequest.pb( + firestore_admin.DeleteBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = firestore_admin.DeleteBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_restore_database_rest_bad_request( + request_type=firestore_admin.RestoreDatabaseRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ""
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.restore_database(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        firestore_admin.RestoreDatabaseRequest,
+        dict,
+    ],
+)
+def test_restore_database_rest_call_success(request_type):
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.restore_database(request)
+
+        # Establish that the response is the type that we expect.
+        assert response.operation.name == "operations/spam"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_restore_database_rest_interceptors(null_interceptor):
+    transport = transports.FirestoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.FirestoreAdminRestInterceptor(),
+    )
+    client = FirestoreAdminClient(transport=transport)
+
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "post_restore_database"
+    ) as post, mock.patch.object(
+        transports.FirestoreAdminRestInterceptor, "pre_restore_database"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = firestore_admin.RestoreDatabaseRequest.pb(
+            firestore_admin.RestoreDatabaseRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = firestore_admin.RestoreDatabaseRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.restore_database(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_backup_schedule_rest_bad_request(
+    request_type=firestore_admin.CreateBackupScheduleRequest,
+):
+    client = 
FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backup_schedule(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.CreateBackupScheduleRequest, + dict, + ], +) +def test_create_backup_schedule_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2"} + request_init["backup_schedule"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "retention": {"seconds": 751, "nanos": 543}, + "daily_recurrence": {}, + "weekly_recurrence": {"day": 1}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.CreateBackupScheduleRequest.meta.fields[ + "backup_schedule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
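+        # Proto-plus message classes expose their fields via `meta.fields`,
+        # while raw protobuf classes expose `DESCRIPTOR.fields`; the branch
+        # below distinguishes the two by the presence of `DESCRIPTOR`.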
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup_schedule(request) + + # Establish that the response is the type that we expect. 
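+        # Unlike the database mutations above, create_backup_schedule is a
+        # unary call: the deserialized schedule.BackupSchedule is returned
+        # directly rather than wrapped in an operation future.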
+ assert isinstance(response, schedule.BackupSchedule) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_create_backup_schedule" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_create_backup_schedule" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.CreateBackupScheduleRequest.pb( + firestore_admin.CreateBackupScheduleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) + req.return_value.content = return_value + + request = firestore_admin.CreateBackupScheduleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schedule.BackupSchedule() + + client.create_backup_schedule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_schedule_rest_bad_request( + request_type=firestore_admin.GetBackupScheduleRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup_schedule(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.GetBackupScheduleRequest, + dict, + ], +) +def test_get_backup_schedule_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = schedule.BackupSchedule( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup_schedule(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_backup_schedule" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_get_backup_schedule" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetBackupScheduleRequest.pb( + firestore_admin.GetBackupScheduleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) + req.return_value.content = return_value + + request = firestore_admin.GetBackupScheduleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schedule.BackupSchedule() + + client.get_backup_schedule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_schedules_rest_bad_request( + request_type=firestore_admin.ListBackupSchedulesRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backup_schedules(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.ListBackupSchedulesRequest, + dict, + ], +) +def test_list_backup_schedules_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListBackupSchedulesResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_schedules(request) + + # Establish that the response is the type that we expect. 
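+        # The default-constructed ListBackupSchedulesResponse carries no
+        # schedules, so only the deserialized type is asserted.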
+ assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_schedules_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_backup_schedules" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_list_backup_schedules" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListBackupSchedulesRequest.pb( + firestore_admin.ListBackupSchedulesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore_admin.ListBackupSchedulesResponse.to_json( + firestore_admin.ListBackupSchedulesResponse() + ) + req.return_value.content = return_value + + request = firestore_admin.ListBackupSchedulesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListBackupSchedulesResponse() + + client.list_backup_schedules( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_schedule_rest_bad_request( + request_type=firestore_admin.UpdateBackupScheduleRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "backup_schedule": { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backup_schedule(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore_admin.UpdateBackupScheduleRequest, + dict, + ], +) +def test_update_backup_schedule_rest_call_success(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_schedule": { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } + } + request_init["backup_schedule"] = { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "retention": {"seconds": 751, "nanos": 543}, + "daily_recurrence": {}, + "weekly_recurrence": {"day": 1}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.UpdateBackupScheduleRequest.meta.fields[ + "backup_schedule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_backup_schedule(request) + + # Establish that the response is the type that we expect. 
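+        # update_backup_schedule, like create, returns the BackupSchedule
+        # directly; no long-running operation is involved.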
+ assert isinstance(response, schedule.BackupSchedule) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_update_backup_schedule" + ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_update_backup_schedule" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.UpdateBackupScheduleRequest.pb( + firestore_admin.UpdateBackupScheduleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) + req.return_value.content = return_value + + request = firestore_admin.UpdateBackupScheduleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schedule.BackupSchedule() + + client.update_backup_schedule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_schedule_rest_bad_request( + request_type=firestore_admin.DeleteBackupScheduleRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_backup_schedule(request) + - @pytest.mark.parametrize( "request_type", [ - firestore_admin.UpdateBackupScheduleRequest, + firestore_admin.DeleteBackupScheduleRequest, dict, ], ) -def test_update_backup_schedule_rest(request_type): +def test_delete_backup_schedule_rest_call_success(request_type): client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup_schedule": { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - } - request_init["backup_schedule"] = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "retention": {"seconds": 751, "nanos": 543}, - "daily_recurrence": {}, - "weekly_recurrence": {"day": 1}, + "name": "projects/sample1/databases/sample2/backupSchedules/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateBackupScheduleRequest.meta.fields[ - "backup_schedule" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_backup_schedule(request) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the response is the type that we expect. 
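+        # delete_backup_schedule returns google.protobuf.Empty on the wire,
+        # which the client surfaces to the caller as None.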
+ assert response is None - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) - subfields_not_in_runtime = [] + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "pre_delete_backup_schedule" + ) as pre: + pre.assert_not_called() + pb_message = firestore_admin.DeleteBackupScheduleRequest.pb( + firestore_admin.DeleteBackupScheduleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + request = firestore_admin.DeleteBackupScheduleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] + client.delete_backup_schedule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + 
{"name": "projects/sample1/databases/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/databases/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} + request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = schedule.BackupSchedule( - name="name_value", - ) + return_value = None # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") - response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_backup_schedule(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" + assert response is None -def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/databases/sample2/operations/sample3"}, request + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) - # Ensure method has been cached - assert ( - client._transport.update_backup_schedule - in client._transport._wrapped_methods - ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_backup_schedule - ] = mock_rpc +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - request = {} - client.update_backup_schedule(request) + request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") - client.update_backup_schedule(request) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -def test_update_backup_schedule_rest_required_fields( - request_type=firestore_admin.UpdateBackupScheduleRequest, -): - transport_class = transports.FirestoreAdminRestTransport - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/databases/sample2"}, request ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_backup_schedule._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) - # verify required fields with non-default values are left alone +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_schedule(request) + response = client.list_operations(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_update_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_initialize_client_w_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - - unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("backupSchedule",))) + assert client is not None -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_index_empty_call_rest(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), + transport="rest", ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_backup_schedule" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_update_backup_schedule" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.UpdateBackupScheduleRequest.pb( - firestore_admin.UpdateBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schedule.BackupSchedule.to_json( - schedule.BackupSchedule() - ) - - request = firestore_admin.UpdateBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - client.update_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + client.create_index(request=None) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateIndexRequest() + assert args[0] == request_msg -def test_update_backup_schedule_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.UpdateBackupScheduleRequest -): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_indexes_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "backup_schedule": { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - } - request = request_type(**request_init) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + client.list_indexes(request=None) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_backup_schedule(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListIndexesRequest() + assert args[0] == request_msg -def test_update_backup_schedule_rest_flattened(): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_index_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + client.get_index(request=None) - # get arguments that satisfy an http rule for this method - sample_request = { - "backup_schedule": { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - } + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetIndexRequest() - # get truthy value for each flattened field - mock_args = dict( - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) + assert args[0] == request_msg - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.update_backup_schedule(**mock_args) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_index_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}" - % client.transport._host, - args[1], - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + client.delete_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteIndexRequest() + assert args[0] == request_msg -def test_update_backup_schedule_rest_flattened_error(transport: str = "rest"): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_field_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup_schedule( - firestore_admin.UpdateBackupScheduleRequest(), - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_field), "__call__") as call: + client.get_field(request=None) + + # Establish that the underlying stub method was called. 
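+    # (A request=None call is promoted to a default-constructed
+    # GetFieldRequest, which is what the assertion below checks.)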
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetFieldRequest() + + assert args[0] == request_msg -def test_update_backup_schedule_rest_error(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_field_empty_call_rest(): client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_field), "__call__") as call: + client.update_field(request=None) -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteBackupScheduleRequest, - dict, - ], -) -def test_delete_backup_schedule_rest(request_type): + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateFieldRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_fields_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_fields), "__call__") as call: + client.list_fields(request=None) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListFieldsRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + assert args[0] == request_msg - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_backup_schedule(request) - # Establish that the response is the type that we expect. - assert response is None +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_documents_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_documents), "__call__") as call: + client.export_documents(request=None) -def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Establish that the underlying stub method was called. 
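+    # (call.mock_calls[0] exposes the positional arguments, so the first one
+    # can be compared against a default ExportDocumentsRequest below.)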
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ExportDocumentsRequest() - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + assert args[0] == request_msg - # Ensure method has been cached - assert ( - client._transport.delete_backup_schedule - in client._transport._wrapped_methods - ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_backup_schedule - ] = mock_rpc +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_documents_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - request = {} - client.delete_backup_schedule(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + client.import_documents(request=None) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ImportDocumentsRequest() - client.delete_backup_schedule(request) + assert args[0] == request_msg - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_delete_documents_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -def test_delete_backup_schedule_rest_required_fields( - request_type=firestore_admin.DeleteBackupScheduleRequest, -): - transport_class = transports.FirestoreAdminRestTransport + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_delete_documents), "__call__" + ) as call: + client.bulk_delete_documents(request=None) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.BulkDeleteDocumentsRequest() - # verify fields with default values are dropped + assert args[0] == request_msg - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - jsonified_request["name"] = "name_value" + # Mock the actual call, and fake the request. 
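+    # (Patching the transport method directly keeps the test offline: no HTTP
+    # session, transcoding, or serialized response body is involved.)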
+ with mock.patch.object(type(client.transport.create_database), "__call__") as call: + client.create_database(request=None) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateDatabaseRequest() - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert args[0] == request_msg + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + client.get_database(request=None) - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetDatabaseRequest() - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + assert args[0] == request_msg - response = client.delete_backup_schedule(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_databases_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListDatabasesRequest() + + assert args[0] == request_msg -def test_delete_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_database_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + client.update_database(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateDatabaseRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_database_empty_call_rest(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), + transport="rest", ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_backup_schedule" - ) as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteBackupScheduleRequest.pb( - firestore_admin.DeleteBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - request = firestore_admin.DeleteBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + client.delete_database(request=None) - client.delete_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteDatabaseRequest() - pre.assert_called_once() + assert args[0] == request_msg -def test_delete_backup_schedule_rest_bad_request( - transport: str = "rest", request_type=firestore_admin.DeleteBackupScheduleRequest -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + client.get_backup(request=None) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_backup_schedule(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetBackupRequest() + assert args[0] == request_msg -def test_delete_backup_schedule_rest_flattened(): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backups_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + client.list_backups(request=None) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListBackupsRequest() - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + assert args[0] == request_msg - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.delete_backup_schedule(**mock_args) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/backupSchedules/*}" - % client.transport._host, - args[1], - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteBackupRequest() + assert args[0] == request_msg -def test_delete_backup_schedule_rest_flattened_error(transport: str = "rest"): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_database_empty_call_rest(): client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_backup_schedule( - firestore_admin.DeleteBackupScheduleRequest(), - name="name_value", - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.restore_database), "__call__") as call: + client.restore_database(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.RestoreDatabaseRequest() -def test_delete_backup_schedule_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + assert args[0] == request_msg -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_schedule_empty_call_rest(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + client.create_backup_schedule(request=None) - # It is an error to provide an api_key and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options=options, - transport=transport, - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.CreateBackupScheduleRequest() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + assert args[0] == request_msg - # It is an error to provide scopes and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_schedule_empty_call_rest(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + client.get_backup_schedule(request=None) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.FirestoreAdminGrpcTransport( + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_schedules_empty_call_rest(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = FirestoreAdminClient(transport=transport) - assert client.transport is transport + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + client.list_backup_schedules(request=None) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.FirestoreAdminGrpcTransport( + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.ListBackupSchedulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_schedule_empty_call_rest(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.FirestoreAdminGrpcAsyncIOTransport( + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.UpdateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_schedule_empty_call_rest(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), "__call__" + ) as call: + client.delete_backup_schedule(request=None) -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - transports.FirestoreAdminRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore_admin.DeleteBackupScheduleRequest() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = FirestoreAdminClient.get_transport_class(transport_name)( + assert args[0] == request_msg + + +def test_firestore_admin_rest_lro_client(): + client = FirestoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, ) - assert transport.kind == transport_name + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client def test_transport_grpc_default(): @@ -18838,23 +19640,6 @@ def test_firestore_admin_http_transport_client_cert_source_for_mtls(): mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_firestore_admin_rest_lro_client(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - @pytest.mark.parametrize( "transport_name", [ @@ -19365,375 +20150,129 @@ def test_parse_operation_path(): def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = FirestoreAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = FirestoreAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = FirestoreAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = FirestoreAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = FirestoreAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = FirestoreAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = FirestoreAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = FirestoreAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = FirestoreAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = FirestoreAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.FirestoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.FirestoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = FirestoreAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = FirestoreAdminClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = FirestoreAdminClient.common_billing_account_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_common_billing_account_path(path) + assert expected == actual - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_operation(request) +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = FirestoreAdminClient.common_folder_path(folder) + assert expected == actual - # Establish that the response is the type that we expect. - assert response is None +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = FirestoreAdminClient.common_folder_path(**expected) -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. 
+ actual = FirestoreAdminClient.parse_common_folder_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = FirestoreAdminClient.common_organization_path(organization) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = FirestoreAdminClient.common_organization_path(**expected) -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_common_organization_path(path) + assert expected == actual - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = FirestoreAdminClient.common_project_path(project) + assert expected == actual - response = client.get_operation(request) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = FirestoreAdminClient.common_project_path(**expected) + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_common_project_path(path) + assert expected == actual -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2"}, request +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = FirestoreAdminClient.common_location_path(project, location) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = FirestoreAdminClient.common_location_path(**expected) -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_common_location_path(path) + assert expected == actual - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.list_operations(request) + with mock.patch.object( + transports.FirestoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + with mock.patch.object( + transports.FirestoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = FirestoreAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) def test_delete_operation(transport: str = "grpc"): @@ -19763,7 +20302,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -19816,7 +20355,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -19861,7 +20400,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -19902,7 +20441,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -19955,7 +20494,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -20000,7 +20539,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -20041,7 +20580,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -20096,7 +20635,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -20143,7 +20682,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -20186,7 +20725,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -20241,7 +20780,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -20288,7 +20827,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -20304,22 +20843,41 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = FirestoreAdminAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py index ac1e63e854..e99f5ae4a9 100644 --- a/tests/unit/gapic/firestore_v1/test_firestore.py +++ b/tests/unit/gapic/firestore_v1/test_firestore.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -69,10 +76,24 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -287,86 +308,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
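+
+
+# A minimal, hypothetical usage sketch (not part of the generated surface) for
+# the two test helpers introduced above: `async_anonymous_credentials` falls
+# back to the synchronous anonymous credentials when google-auth's aio support
+# is unavailable, and `mock_async_gen` (at its default chunk_size of 1) yields
+# byte chunks that reassemble into the original UTF-8 payload.
+async def _helper_usage_sketch():  # pragma: NO COVER
+    creds = async_anonymous_credentials()
+    assert creds is not None
+
+    collected = b""
+    async for chunk in mock_async_gen('{"name": "name_value"}'):
+        collected += chunk
+    assert collected == b'{"name": "name_value"}'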
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - (FirestoreClient, transports.FirestoreRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1128,25 +1069,6 @@ def test_get_document(request_type, transport: str = "grpc"): assert response.name == "name_value" -def test_get_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest() - - def test_get_document_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1210,29 +1132,6 @@ def test_get_document_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document( - name="name_value", - ) - ) - response = await client.get_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest() - - @pytest.mark.asyncio async def test_get_document_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1241,7 +1140,7 @@ async def test_get_document_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1280,7 +1179,7 @@ async def test_get_document_async( transport: str = "grpc_asyncio", request_type=firestore.GetDocumentRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1346,7 +1245,7 @@ def test_get_document_field_headers(): @pytest.mark.asyncio async def test_get_document_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1409,25 +1308,6 @@ def test_list_documents(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest() - - def test_list_documents_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1497,29 +1377,6 @@ def test_list_documents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListDocumentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest() - - @pytest.mark.asyncio async def test_list_documents_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1528,7 +1385,7 @@ async def test_list_documents_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1567,7 +1424,7 @@ async def test_list_documents_async( transport: str = "grpc_asyncio", request_type=firestore.ListDocumentsRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1634,7 +1491,7 @@ def test_list_documents_field_headers(): @pytest.mark.asyncio async def test_list_documents_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1767,7 +1624,7 @@ def test_list_documents_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_documents_async_pager(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1817,7 +1674,7 @@ async def test_list_documents_async_pager(): @pytest.mark.asyncio async def test_list_documents_async_pages(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
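# --- Editor's note (not part of the patch): a minimal sketch of how the
# async pager exercised by test_list_documents_async_pager is consumed in
# application code. FirestoreAsyncClient and async_anonymous_credentials come
# from this module; the resource names are illustrative, and a real call
# needs a reachable backend rather than mocked transport methods.

import asyncio

async def collect_document_names():
    client = FirestoreAsyncClient(credentials=async_anonymous_credentials())
    pager = await client.list_documents(
        request={
            "parent": "projects/my-project/databases/(default)/documents",
            "collection_id": "my-collection",
        }
    )
    # `async for` pulls additional pages lazily via next_page_token.
    return [doc.name async for doc in pager]

# Usage sketch: names = asyncio.run(collect_document_names())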
@@ -1899,25 +1756,6 @@ def test_update_document(request_type, transport: str = "grpc"): assert response.name == "name_value" -def test_update_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() - - def test_update_document_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1977,29 +1815,6 @@ def test_update_document_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document( - name="name_value", - ) - ) - response = await client.update_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() - - @pytest.mark.asyncio async def test_update_document_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2008,7 +1823,7 @@ async def test_update_document_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2047,7 +1862,7 @@ async def test_update_document_async( transport: str = "grpc_asyncio", request_type=firestore.UpdateDocumentRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2113,7 +1928,7 @@ def test_update_document_field_headers(): @pytest.mark.asyncio async def test_update_document_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2188,7 +2003,7 @@ def test_update_document_flattened_error(): @pytest.mark.asyncio async def test_update_document_flattened_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
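# --- Editor's note (not part of the patch): a sketch of the caching contract
# that the *_use_cached_wrapped_rpc tests above assert. Generated clients
# wrap each transport method once during _prep_wrapped_messages and store the
# wrapper in transport._wrapped_methods, keyed by the bound transport method,
# so repeated calls reuse one wrapper instead of re-wrapping on every call.
# The attribute names follow the generated code exercised by these tests.

from unittest import mock

def demo_cached_wrapped_rpc():
    client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials())
    mock_rpc = mock.Mock()
    mock_rpc.return_value.name = "foo"
    transport = client._transport
    # Replace the cached wrapper for update_document with our mock.
    transport._wrapped_methods[transport.update_document] = mock_rpc

    client.update_document(request={})
    client.update_document(request={})
    # Both invocations went through the single cached wrapper.
    assert mock_rpc.call_count == 2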
@@ -2221,7 +2036,7 @@ async def test_update_document_flattened_async(): @pytest.mark.asyncio async def test_update_document_flattened_error_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2267,25 +2082,6 @@ def test_delete_document(request_type, transport: str = "grpc"): assert response is None -def test_delete_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest() - - def test_delete_document_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2349,25 +2145,6 @@ def test_delete_document_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest() - - @pytest.mark.asyncio async def test_delete_document_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2376,7 +2153,7 @@ async def test_delete_document_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2415,7 +2192,7 @@ async def test_delete_document_async( transport: str = "grpc_asyncio", request_type=firestore.DeleteDocumentRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2476,7 +2253,7 @@ def test_delete_document_field_headers(): @pytest.mark.asyncio async def test_delete_document_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2544,7 +2321,7 @@ def test_delete_document_flattened_error(): @pytest.mark.asyncio async def test_delete_document_flattened_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2571,7 +2348,7 @@ async def test_delete_document_flattened_async(): @pytest.mark.asyncio async def test_delete_document_flattened_error_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2619,27 +2396,6 @@ def test_batch_get_documents(request_type, transport: str = "grpc"): assert isinstance(message, firestore.BatchGetDocumentsResponse) -def test_batch_get_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.batch_get_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest() - - def test_batch_get_documents_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2709,30 +2465,6 @@ def test_batch_get_documents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_batch_get_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.BatchGetDocumentsResponse()] - ) - response = await client.batch_get_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest() - - @pytest.mark.asyncio async def test_batch_get_documents_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2741,7 +2473,7 @@ async def test_batch_get_documents_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2780,7 +2512,7 @@ async def test_batch_get_documents_async( transport: str = "grpc_asyncio", request_type=firestore.BatchGetDocumentsRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2849,7 +2581,7 @@ def test_batch_get_documents_field_headers(): @pytest.mark.asyncio async def test_batch_get_documents_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2919,27 +2651,6 @@ def test_begin_transaction(request_type, transport: str = "grpc"): assert response.transaction == b"transaction_blob" -def test_begin_transaction_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest() - - def test_begin_transaction_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3008,41 +2719,16 @@ def test_begin_transaction_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_begin_transaction_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - ) - response = await client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest() - - -@pytest.mark.asyncio -async def test_begin_transaction_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +async def test_begin_transaction_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -3079,7 +2765,7 @@ async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=firestore.BeginTransactionRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3149,7 +2835,7 @@ def test_begin_transaction_field_headers(): @pytest.mark.asyncio async def test_begin_transaction_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3223,7 +2909,7 @@ def test_begin_transaction_flattened_error(): @pytest.mark.asyncio async def test_begin_transaction_flattened_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3254,7 +2940,7 @@ async def test_begin_transaction_flattened_async(): @pytest.mark.asyncio async def test_begin_transaction_flattened_error_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3299,25 +2985,6 @@ def test_commit(request_type, transport: str = "grpc"): assert isinstance(response, firestore.CommitResponse) -def test_commit_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.commit() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest() - - def test_commit_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3381,34 +3048,13 @@ def test_commit_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_commit_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - response = await client.commit() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest() - - @pytest.mark.asyncio async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3447,7 +3093,7 @@ async def test_commit_async( transport: str = "grpc_asyncio", request_type=firestore.CommitRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3510,7 +3156,7 @@ def test_commit_field_headers(): @pytest.mark.asyncio async def test_commit_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3585,7 +3231,7 @@ def test_commit_flattened_error(): @pytest.mark.asyncio async def test_commit_flattened_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3618,7 +3264,7 @@ async def test_commit_flattened_async(): @pytest.mark.asyncio async def test_commit_flattened_error_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3664,25 +3310,6 @@ def test_rollback(request_type, transport: str = "grpc"): assert response is None -def test_rollback_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.rollback() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest() - - def test_rollback_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3746,32 +3373,13 @@ def test_rollback_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_rollback_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest() - - @pytest.mark.asyncio async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3810,7 +3418,7 @@ async def test_rollback_async( transport: str = "grpc_asyncio", request_type=firestore.RollbackRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3871,7 +3479,7 @@ def test_rollback_field_headers(): @pytest.mark.asyncio async def test_rollback_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3944,7 +3552,7 @@ def test_rollback_flattened_error(): @pytest.mark.asyncio async def test_rollback_flattened_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3975,7 +3583,7 @@ async def test_rollback_flattened_async(): @pytest.mark.asyncio async def test_rollback_flattened_error_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4022,25 +3630,6 @@ def test_run_query(request_type, transport: str = "grpc"): assert isinstance(message, firestore.RunQueryResponse) -def test_run_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
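# --- Editor's note (not part of the patch): a sketch of the async
# unary-unary mocking idiom used throughout this file, shown here for
# rollback. FakeUnaryUnaryCall wraps a plain value so that awaiting the call
# inside the client resolves to it; rollback returns Empty, hence None.
# Resource names are illustrative.

from google.api_core import grpc_helpers_async
from unittest import mock

async def demo_rollback_mock():
    client = FirestoreAsyncClient(credentials=async_anonymous_credentials())
    with mock.patch.object(type(client.transport.rollback), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.rollback(
            request={"database": "projects/p/databases/d", "transaction": b"txn"}
        )
    assert response is None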
- with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.run_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest() - - def test_run_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4104,35 +3693,13 @@ def test_run_query_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_run_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunQueryResponse()] - ) - response = await client.run_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest() - - @pytest.mark.asyncio async def test_run_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4171,7 +3738,7 @@ async def test_run_query_async( transport: str = "grpc_asyncio", request_type=firestore.RunQueryRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4236,7 +3803,7 @@ def test_run_query_field_headers(): @pytest.mark.asyncio async def test_run_query_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4302,27 +3869,6 @@ def test_run_aggregation_query(request_type, transport: str = "grpc"): assert isinstance(message, firestore.RunAggregationQueryResponse) -def test_run_aggregation_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.run_aggregation_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunAggregationQueryRequest() - - def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4393,30 +3939,6 @@ def test_run_aggregation_query_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_run_aggregation_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunAggregationQueryResponse()] - ) - response = await client.run_aggregation_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunAggregationQueryRequest() - - @pytest.mark.asyncio async def test_run_aggregation_query_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4425,7 +3947,7 @@ async def test_run_aggregation_query_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4464,7 +3986,7 @@ async def test_run_aggregation_query_async( transport: str = "grpc_asyncio", request_type=firestore.RunAggregationQueryRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4533,7 +4055,7 @@ def test_run_aggregation_query_field_headers(): @pytest.mark.asyncio async def test_run_aggregation_query_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4601,25 +4123,6 @@ def test_partition_query(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_partition_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.partition_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest() - - def test_partition_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4685,29 +4188,6 @@ def test_partition_query_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_partition_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.PartitionQueryResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.partition_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest() - - @pytest.mark.asyncio async def test_partition_query_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4716,7 +4196,7 @@ async def test_partition_query_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4755,7 +4235,7 @@ async def test_partition_query_async( transport: str = "grpc_asyncio", request_type=firestore.PartitionQueryRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4821,7 +4301,7 @@ def test_partition_query_field_headers(): @pytest.mark.asyncio async def test_partition_query_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4948,7 +4428,7 @@ def test_partition_query_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_partition_query_async_pager(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4998,7 +4478,7 @@ async def test_partition_query_async_pager(): @pytest.mark.asyncio async def test_partition_query_async_pages(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
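# --- Editor's note (not part of the patch): a sketch of how the run_query
# and batch_get_documents tests above fake a server-streaming call: an
# autospec'd aio.UnaryStreamCall whose read() is an AsyncMock queued with the
# responses to emit. The parent path is illustrative.

from grpc.experimental import aio
from unittest import mock

async def demo_run_query_mock():
    client = FirestoreAsyncClient(credentials=async_anonymous_credentials())
    with mock.patch.object(type(client.transport.run_query), "__call__") as call:
        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
        call.return_value.read = mock.AsyncMock(
            side_effect=[firestore.RunQueryResponse()]
        )
        stream = await client.run_query(
            request={"parent": "projects/p/databases/d/documents"}
        )
        message = await stream.read()
    assert isinstance(message, firestore.RunQueryResponse)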
@@ -5119,7 +4599,7 @@ async def test_write_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5158,7 +4638,7 @@ async def test_write_async( transport: str = "grpc_asyncio", request_type=firestore.WriteRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5264,7 +4744,7 @@ async def test_listen_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5303,7 +4783,7 @@ async def test_listen_async( transport: str = "grpc_asyncio", request_type=firestore.ListenRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5376,27 +4856,6 @@ def test_list_collection_ids(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_collection_ids_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_collection_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest() - - def test_list_collection_ids_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5468,32 +4927,6 @@ def test_list_collection_ids_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_collection_ids_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - ) - response = await client.list_collection_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest() - - @pytest.mark.asyncio async def test_list_collection_ids_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -5502,7 +4935,7 @@ async def test_list_collection_ids_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5541,7 +4974,7 @@ async def test_list_collection_ids_async( transport: str = "grpc_asyncio", request_type=firestore.ListCollectionIdsRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5613,7 +5046,7 @@ def test_list_collection_ids_field_headers(): @pytest.mark.asyncio async def test_list_collection_ids_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5687,7 +5120,7 @@ def test_list_collection_ids_flattened_error(): @pytest.mark.asyncio async def test_list_collection_ids_flattened_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5718,7 +5151,7 @@ async def test_list_collection_ids_flattened_async(): @pytest.mark.asyncio async def test_list_collection_ids_flattened_error_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5832,7 +5265,7 @@ def test_list_collection_ids_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_collection_ids_async_pager(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5884,7 +5317,7 @@ async def test_list_collection_ids_async_pager(): @pytest.mark.asyncio async def test_list_collection_ids_async_pages(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5965,28 +5398,9 @@ def test_batch_write(request_type, transport: str = "grpc"): assert isinstance(response, firestore.BatchWriteResponse) -def test_batch_write_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.batch_write() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest() - - -def test_batch_write_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_batch_write_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", @@ -6047,27 +5461,6 @@ def test_batch_write_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_batch_write_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BatchWriteResponse() - ) - response = await client.batch_write() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest() - - @pytest.mark.asyncio async def test_batch_write_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6076,7 +5469,7 @@ async def test_batch_write_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6115,7 +5508,7 @@ async def test_batch_write_async( transport: str = "grpc_asyncio", request_type=firestore.BatchWriteRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6178,7 +5571,7 @@ def test_batch_write_field_headers(): @pytest.mark.asyncio async def test_batch_write_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6243,25 +5636,6 @@ def test_create_document(request_type, transport: str = "grpc"): assert response.name == "name_value" -def test_create_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest() - - def test_create_document_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6329,29 +5703,6 @@ def test_create_document_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document( - name="name_value", - ) - ) - response = await client.create_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest() - - @pytest.mark.asyncio async def test_create_document_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6360,7 +5711,7 @@ async def test_create_document_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6399,7 +5750,7 @@ async def test_create_document_async( transport: str = "grpc_asyncio", request_type=firestore.CreateDocumentRequest ): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6466,7 +5817,7 @@ def test_create_document_field_headers(): @pytest.mark.asyncio async def test_create_document_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6494,48 +5845,6 @@ async def test_create_document_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - firestore.GetDocumentRequest, - dict, - ], -) -def test_get_document_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = document.Document( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_document(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - - def test_get_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6644,6 +5953,7 @@ def test_get_document_rest_required_fields(request_type=firestore.GetDocumentReq response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_document(request) @@ -6670,132 +5980,6 @@ def test_get_document_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_get_document" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_get_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.GetDocumentRequest.pb(firestore.GetDocumentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) - - request = firestore.GetDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = document.Document() - - client.get_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=firestore.GetDocumentRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
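# --- Editor's note (not part of the patch): a sketch of the REST response
# faking used in the rest_required_fields tests above, including the headers
# dict this patch starts setting on mocked responses. The assumption, based
# on the test changes rather than the transport source, is that the
# regenerated REST plumbing now reads response headers, so a mock without
# them no longer suffices.

from requests import Response
from google.protobuf import json_format

def fake_document_response() -> Response:
    return_value = document.Document(name="name_value")
    response_value = Response()
    response_value.status_code = 200
    # proto-plus -> raw protobuf -> JSON, matching what the server would send.
    pb_value = document.Document.pb(return_value)
    response_value._content = json_format.MessageToJson(pb_value).encode("UTF-8")
    response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
    return response_value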
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_document(request) - - -def test_get_document_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3/sample4", - "collection_id": "sample5", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.ListDocumentsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.ListDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_documents(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6910,6 +6094,7 @@ def test_list_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_documents(request) @@ -6940,90 +6125,10 @@ def test_list_documents_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( +def test_list_documents_rest_pager(transport: str = "rest"): + client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_list_documents" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_list_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.ListDocumentsRequest.pb(firestore.ListDocumentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - 
req.return_value._content = firestore.ListDocumentsResponse.to_json( - firestore.ListDocumentsResponse() - ) - - request = firestore.ListDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.ListDocumentsResponse() - - client.list_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=firestore.ListDocumentsRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3/sample4", - "collection_id": "sample5", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_documents(request) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport, ) # Mock the http request call within the method and fake a response. @@ -7084,123 +6189,6 @@ def test_list_documents_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - firestore.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - } - request_init["document"] = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4", - "fields": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gf_document.Document( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gf_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_document(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gf_document.Document) - assert response.name == "name_value" - - def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7307,6 +6295,7 @@ def test_update_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_document(request) @@ -7333,87 +6322,6 @@ def test_update_document_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_update_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.UpdateDocumentRequest.pb( - firestore.UpdateDocumentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gf_document.Document.to_json(gf_document.Document()) - - request = firestore.UpdateDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gf_document.Document() - - client.update_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=firestore.UpdateDocumentRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
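
Editor's note: every `*_rest_interceptors` test is deleted by this patch. For reference, the pre/post hook flow they covered reduces to the shape below (class and function names are simplified stand-ins, not the real transport code):

class Interceptor:
    # Mirrors the pre_*/post_* pairs on FirestoreRestInterceptor: the pre
    # hook may rewrite the request and metadata, the post hook may rewrite
    # the response.
    def pre(self, request, metadata):
        return request, metadata

    def post(self, response):
        return response


def invoke(interceptor, request, metadata, send):
    request, metadata = interceptor.pre(request, metadata)
    response = send(request, metadata)
    return interceptor.post(response)


result = invoke(
    Interceptor(),
    request={"name": "projects/sample1/databases/sample2/documents/sample3"},
    metadata=[("cephalopod", "squid")],
    send=lambda request, metadata: {"ok": True},
)
assert result == {"ok": True}
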
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_document(request) - - def test_update_document_rest_flattened(): client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7447,6 +6355,7 @@ def test_update_document_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_document(**mock_args) @@ -7477,57 +6386,14 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): ) -def test_update_document_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.DeleteDocumentRequest, - dict, - ], -) -def test_delete_document_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_document(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_document_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_delete_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -7621,6 +6487,7 @@ def test_delete_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_document(request) @@ -7638,79 +6505,6 @@ def test_delete_document_rest_unset_required_fields(): assert set(unset_fields) == (set(("currentDocument",)) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_delete_document" - ) as pre: - pre.assert_not_called() - pb_message = firestore.DeleteDocumentRequest.pb( - firestore.DeleteDocumentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = firestore.DeleteDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=firestore.DeleteDocumentRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_document(request) - - def test_delete_document_rest_flattened(): client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7739,6 +6533,7 @@ def test_delete_document_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_document(**mock_args) @@ -7768,60 +6563,6 @@ def test_delete_document_rest_flattened_error(transport: str = "rest"): ) -def test_delete_document_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BatchGetDocumentsRequest, - dict, - ], -) -def test_batch_get_documents_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.BatchGetDocumentsResponse( - transaction=b"transaction_blob", - missing="missing_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.BatchGetDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.batch_get_documents(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.BatchGetDocumentsResponse) - assert response.transaction == b"transaction_blob" - - def test_batch_get_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7930,6 +6671,7 @@ def test_batch_get_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) @@ -7949,182 +6691,56 @@ def test_batch_get_documents_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("database",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_batch_get_documents" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_batch_get_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.BatchGetDocumentsRequest.pb( - firestore.BatchGetDocumentsRequest() +def test_begin_transaction_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.BatchGetDocumentsResponse.to_json( - firestore.BatchGetDocumentsResponse() - ) - req.return_value._content = "[{}]".format(req.return_value._content) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request = firestore.BatchGetDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BatchGetDocumentsResponse() + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods - client.batch_get_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.begin_transaction + ] = mock_rpc - pre.assert_called_once() - post.assert_called_once() + request = {} + client.begin_transaction(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -def test_batch_get_documents_rest_bad_request( - transport: str = "rest", request_type=firestore.BatchGetDocumentsRequest + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_begin_transaction_rest_required_fields( + request_type=firestore.BeginTransactionRequest, ): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.FirestoreRestTransport - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} + request_init = {} + request_init["database"] = "" request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_get_documents(request) - - -def test_batch_get_documents_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BeginTransactionRequest, - dict, - ], -) -def test_begin_transaction_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.begin_transaction(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" - - -def test_begin_transaction_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
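
Editor's note: the `*_use_cached_wrapped_rpc` tests that replace the deleted empty-call and interceptor tests all assert one property: wrapping happens once at client construction, and later calls reuse the cached wrapper. A rough stand-alone model of that property (FakeTransport and this local wrap_method are stand-ins, not the real gapic machinery):

from unittest import mock

# Stand-in for google.api_core.gapic_v1.method.wrap_method.
wrap_method = mock.Mock(side_effect=lambda fn: mock.Mock(wraps=fn))


class FakeTransport:
    def __init__(self):
        # Wrap each RPC once, up front, and cache it -- mirroring what the
        # generated transports do in _prep_wrapped_messages.
        self.begin_transaction = lambda request: "response"
        self._wrapped_methods = {
            self.begin_transaction: wrap_method(self.begin_transaction)
        }


transport = FakeTransport()
assert wrap_method.call_count == 1  # wrapped at construction time

# Repeated calls go through the cached wrapper; nothing is re-wrapped.
transport._wrapped_methods[transport.begin_transaction]({})
transport._wrapped_methods[transport.begin_transaction]({})
assert wrap_method.call_count == 1
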
- ) - client._transport._wrapped_methods[ - client._transport.begin_transaction - ] = mock_rpc - - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_begin_transaction_rest_required_fields( - request_type=firestore.BeginTransactionRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped @@ -8180,6 +6796,7 @@ def test_begin_transaction_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.begin_transaction(request) @@ -8197,85 +6814,6 @@ def test_begin_transaction_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("database",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_begin_transaction_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_begin_transaction" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_begin_transaction" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.BeginTransactionRequest.pb( - firestore.BeginTransactionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.BeginTransactionResponse.to_json( - firestore.BeginTransactionResponse() - ) - - request = firestore.BeginTransactionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BeginTransactionResponse() - - client.begin_transaction( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_begin_transaction_rest_bad_request( - transport: str = "rest", request_type=firestore.BeginTransactionRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
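
Editor's note: the re-indented `test_begin_transaction_rest_required_fields` body relies on `MessageToJson` dropping proto3 defaults, which is how it detects unset required fields. Using the same request type the test imports, the core check reduces to:

import json

from google.protobuf import json_format

from google.cloud.firestore_v1.types import firestore

# "database" is set to its proto3 default (empty string), so the JSON
# serialization drops it -- exactly what "verify fields with default values
# are dropped" means in the test above.
request = firestore.BeginTransactionRequest(database="")
pb_request = firestore.BeginTransactionRequest.pb(request)
jsonified_request = json.loads(
    json_format.MessageToJson(pb_request, use_integers_for_enums=False)
)
assert "database" not in jsonified_request
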
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.begin_transaction(request) - - def test_begin_transaction_rest_flattened(): client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8304,6 +6842,7 @@ def test_begin_transaction_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.begin_transaction(**mock_args) @@ -8333,49 +6872,6 @@ def test_begin_transaction_rest_flattened_error(transport: str = "rest"): ) -def test_begin_transaction_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.CommitRequest, - dict, - ], -) -def test_commit_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.CommitResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.commit(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.CommitResponse) - - def test_commit_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8477,6 +6973,7 @@ def test_commit_rest_required_fields(request_type=firestore.CommitRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.commit(request) @@ -8494,93 +6991,16 @@ def test_commit_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("database",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_commit_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( +def test_commit_rest_flattened(): + client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + transport="rest", ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_commit" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_commit" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.CommitRequest.pb(firestore.CommitRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.CommitResponse.to_json( - firestore.CommitResponse() - ) - - request = firestore.CommitRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.CommitResponse() - - client.commit( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_commit_rest_bad_request( - transport: str = "rest", request_type=firestore.CommitRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.commit(request) - - -def test_commit_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.CommitResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore.CommitResponse() # get arguments that satisfy an http rule for this method sample_request = {"database": "projects/sample1/databases/sample2"} @@ -8600,6 +7020,7 @@ def test_commit_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.commit(**mock_args) @@ -8630,47 +7051,6 @@ def test_commit_rest_flattened_error(transport: str = "rest"): ) -def test_commit_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RollbackRequest, - dict, - ], -) -def test_rollback_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.rollback(request) - - # Establish that the response is the type that we expect. 
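
Editor's note: `test_commit_rest_flattened` and its `_error` twin pin down the flattening contract -- keyword arguments are folded into a request message, and passing both a request object and flattened fields raises ValueError. A compact stand-in of that contract (the `commit` helper below is illustrative, not the generated client):

from unittest import mock


def commit(transport, *, request=None, database=None, writes=None):
    # Generated clients accept either a full request message or flattened
    # fields -- never both, which is what the *_flattened_error test checks.
    if request is not None and (database is not None or writes is not None):
        raise ValueError("Cannot pass both request and flattened parameters")
    request = request or {"database": database, "writes": writes or []}
    return transport(request)


transport = mock.Mock(return_value="commit-response")
commit(transport, database="projects/sample1/databases/sample2")
transport.assert_called_once_with(
    {"database": "projects/sample1/databases/sample2", "writes": []}
)
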
- assert response is None - - def test_rollback_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8773,6 +7153,7 @@ def test_rollback_rest_required_fields(request_type=firestore.RollbackRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.rollback(request) @@ -8798,75 +7179,6 @@ def test_rollback_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_rollback" - ) as pre: - pre.assert_not_called() - pb_message = firestore.RollbackRequest.pb(firestore.RollbackRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = firestore.RollbackRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.rollback( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_rollback_rest_bad_request( - transport: str = "rest", request_type=firestore.RollbackRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.rollback(request) - - def test_rollback_rest_flattened(): client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8894,6 +7206,7 @@ def test_rollback_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.rollback(**mock_args) @@ -8924,62 +7237,6 @@ def test_rollback_rest_flattened_error(transport: str = "rest"): ) -def test_rollback_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RunQueryRequest, - dict, - ], -) -def test_run_query_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.RunQueryResponse( - transaction=b"transaction_blob", - skipped_results=1633, - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_query(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.RunQueryResponse) - assert response.transaction == b"transaction_blob" - assert response.skipped_results == 1633 - - def test_run_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9082,6 +7339,7 @@ def test_run_query_rest_required_fields(request_type=firestore.RunQueryRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) @@ -9101,149 +7359,18 @@ def test_run_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_query_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_run_query" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_run_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.RunQueryRequest.pb(firestore.RunQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.RunQueryResponse.to_json( - firestore.RunQueryResponse() +def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - req.return_value._content = "[{}]".format(req.return_value._content) - request = firestore.RunQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.RunQueryResponse() - - client.run_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_query_rest_bad_request( - transport: str = "rest", request_type=firestore.RunQueryRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
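
Editor's note: for the server-streaming methods (`run_query`, `batch_get_documents`, `run_aggregation_query`) the REST tests fake a stream by wrapping one serialized message in a JSON array and feeding it through `iter_content`. Stripped of the Firestore message types (the payload dict here is illustrative), the setup is:

import json
from unittest import mock

from requests import Response

message = {"transaction": "dHJhbnNhY3Rpb25fYmxvYg=="}  # illustrative payload
json_return_value = "[{}]".format(json.dumps(message))  # one-element "stream"

response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
response_value.headers = {"header-1": "value-1", "header-2": "value-2"}

# The tests patch iter_content on the instance and stream the body one
# character at a time, matching iter(json_return_value).
with mock.patch.object(response_value, "iter_content") as iter_content:
    iter_content.return_value = iter(json_return_value)
    assert "".join(response_value.iter_content()) == json_return_value
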
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_query(request) - - -def test_run_query_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RunAggregationQueryRequest, - dict, - ], -) -def test_run_aggregation_query_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.RunAggregationQueryResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_aggregation_query(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.RunAggregationQueryResponse) - assert response.transaction == b"transaction_blob" - - -def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert ( @@ -9341,6 +7468,7 @@ def test_run_aggregation_query_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) @@ -9360,132 +7488,6 @@ def test_run_aggregation_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_aggregation_query_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_run_aggregation_query" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_run_aggregation_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.RunAggregationQueryRequest.pb( - firestore.RunAggregationQueryRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.RunAggregationQueryResponse.to_json( - firestore.RunAggregationQueryResponse() - ) - req.return_value._content = "[{}]".format(req.return_value._content) - - request = firestore.RunAggregationQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.RunAggregationQueryResponse() - - client.run_aggregation_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_aggregation_query_rest_bad_request( - transport: str = "rest", request_type=firestore.RunAggregationQueryRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_aggregation_query(request) - - -def test_run_aggregation_query_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.PartitionQueryRequest, - dict, - ], -) -def test_partition_query_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.PartitionQueryResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.PartitionQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.partition_query(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.PartitionQueryPager) - assert response.next_page_token == "next_page_token_value" - - def test_partition_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9589,6 +7591,7 @@ def test_partition_query_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.partition_query(request) @@ -9606,89 +7609,10 @@ def test_partition_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_partition_query_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( +def test_partition_query_rest_pager(transport: str = "rest"): + client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_partition_query" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_partition_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.PartitionQueryRequest.pb( - firestore.PartitionQueryRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = firestore.PartitionQueryResponse.to_json( - firestore.PartitionQueryResponse() - ) - - request = firestore.PartitionQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.PartitionQueryResponse() - - client.partition_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_partition_query_rest_bad_request( - transport: str = "rest", request_type=firestore.PartitionQueryRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.partition_query(request) - - -def test_partition_query_rest_pager(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport, ) # Mock the http request call within the method and fake a response. @@ -9768,48 +7692,6 @@ def test_listen_rest_unimplemented(): client.listen(requests) -@pytest.mark.parametrize( - "request_type", - [ - firestore.ListCollectionIdsRequest, - dict, - ], -) -def test_list_collection_ids_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.ListCollectionIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_collection_ids(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCollectionIdsPager) - assert response.collection_ids == ["collection_ids_value"] - assert response.next_page_token == "next_page_token_value" - - def test_list_collection_ids_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9917,6 +7799,7 @@ def test_list_collection_ids_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_collection_ids(request) @@ -9934,85 +7817,6 @@ def test_list_collection_ids_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_collection_ids_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_list_collection_ids" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_list_collection_ids" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.ListCollectionIdsRequest.pb( - firestore.ListCollectionIdsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.ListCollectionIdsResponse.to_json( - firestore.ListCollectionIdsResponse() - ) - - request = firestore.ListCollectionIdsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.ListCollectionIdsResponse() - - client.list_collection_ids( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_collection_ids_rest_bad_request( - transport: str = "rest", request_type=firestore.ListCollectionIdsRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_collection_ids(request) - - def test_list_collection_ids_rest_flattened(): client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10041,6 +7845,7 @@ def test_list_collection_ids_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_collection_ids(**mock_args) @@ -10133,43 +7938,6 @@ def test_list_collection_ids_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - firestore.BatchWriteRequest, - dict, - ], -) -def test_batch_write_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.BatchWriteResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.batch_write(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.BatchWriteResponse) - - def test_batch_write_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10271,6 +8039,7 @@ def test_batch_write_rest_required_fields(request_type=firestore.BatchWriteReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_write(request) @@ -10288,235 +8057,36 @@ def test_batch_write_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("database",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_write_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_batch_write" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_batch_write" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.BatchWriteRequest.pb(firestore.BatchWriteRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.BatchWriteResponse.to_json( - firestore.BatchWriteResponse() +def test_create_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - request = firestore.BatchWriteRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BatchWriteResponse() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - client.batch_write( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Ensure method has been cached + assert client._transport.create_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc - pre.assert_called_once() - post.assert_called_once() + request = {} + client.create_document(request) + # Establish that the underlying gRPC stub method was called. 
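+        # One recorded call on the mock shows the client went through the
+        # wrapper cached in _wrapped_methods at construction time rather than
+        # wrapping the method again for this invocation.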
+ assert mock_rpc.call_count == 1 -def test_batch_write_rest_bad_request( - transport: str = "rest", request_type=firestore.BatchWriteRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_write(request) - - -def test_batch_write_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.CreateDocumentRequest, - dict, - ], -) -def test_create_document_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3", - "collection_id": "sample4", - } - request_init["document"] = { - "name": "name_value", - "fields": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore.CreateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_document(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - - -def test_create_document_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc - - request = {} - client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_document(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10601,6 +8171,7 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_document(request) @@ -10632,92 +8203,6 @@ def test_create_document_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_create_document" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_create_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.CreateDocumentRequest.pb( - firestore.CreateDocumentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) - - request = firestore.CreateDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = document.Document() - - client.create_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=firestore.CreateDocumentRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3", - "collection_id": "sample4", - } - 
request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_document(request) - - -def test_create_document_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.FirestoreGrpcTransport( @@ -10787,41 +8272,3125 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel - transport = transports.FirestoreGrpcAsyncIOTransport( + transport = transports.FirestoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreGrpcTransport, + transports.FirestoreGrpcAsyncIOTransport, + transports.FirestoreRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = FirestoreClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_document_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = document.Document() + client.get_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.GetDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_documents_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = firestore.ListDocumentsResponse() + client.list_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ListDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
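+# (With request=None the client must build a default request message itself;
+# the assertion on call.mock_calls[0] below checks exactly that.)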
+def test_update_document_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = gf_document.Document() + client.update_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.UpdateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_document_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = None + client.delete_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.DeleteDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_documents_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), "__call__" + ) as call: + call.return_value = iter([firestore.BatchGetDocumentsResponse()]) + client.batch_get_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BatchGetDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_begin_transaction_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + call.return_value = firestore.BeginTransactionResponse() + client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value = firestore.CommitResponse() + client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_rollback_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value = None + client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_query_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value = iter([firestore.RunQueryResponse()]) + client.run_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RunQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_aggregation_query_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + call.return_value = iter([firestore.RunAggregationQueryResponse()]) + client.run_aggregation_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RunAggregationQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_query_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.partition_query), "__call__") as call: + call.return_value = firestore.PartitionQueryResponse() + client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_collection_ids_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), "__call__" + ) as call: + call.return_value = firestore.ListCollectionIdsResponse() + client.list_collection_ids(request=None) + + # Establish that the underlying stub method was called. 
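+    # Each entry in mock_calls is a (name, args, kwargs) triple, so args[0]
+    # below is the request message the transport actually sent.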
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ListCollectionIdsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_write_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + call.return_value = firestore.BatchWriteResponse() + client.batch_write(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BatchWriteRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_document_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = document.Document() + client.create_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.CreateDocumentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = FirestoreAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_document_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document.Document( + name="name_value", + ) + ) + await client.get_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.GetDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_documents_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. 
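+        # FakeUnaryUnaryCall stands in for a real async unary call: awaiting
+        # it resolves to the response message supplied here.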
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ListDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_document_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gf_document.Document( + name="name_value", + ) + ) + await client.update_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.UpdateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_document_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.DeleteDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_get_documents_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[firestore.BatchGetDocumentsResponse()] + ) + await client.batch_get_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BatchGetDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_begin_transaction_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + ) + await client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_commit_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore.CommitResponse() + ) + await client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_rollback_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_query_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[firestore.RunQueryResponse()] + ) + await client.run_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RunQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_aggregation_query_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
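+    # As with run_query above, a server-streaming method is mocked as an
+    # aio.UnaryStreamCall whose read() coroutine returns one response per
+    # side_effect entry.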
+ with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[firestore.RunAggregationQueryResponse()] + ) + await client.run_aggregation_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RunAggregationQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_partition_query_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.partition_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore.PartitionQueryResponse( + next_page_token="next_page_token_value", + ) + ) + await client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_collection_ids_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore.ListCollectionIdsResponse( + collection_ids=["collection_ids_value"], + next_page_token="next_page_token_value", + ) + ) + await client.list_collection_ids(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ListCollectionIdsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_write_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firestore.BatchWriteResponse() + ) + await client.batch_write(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BatchWriteRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_document_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document.Document( + name="name_value", + ) + ) + await client.create_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.CreateDocumentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = FirestoreClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_document_rest_bad_request(request_type=firestore.GetDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/documents/sample3/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.GetDocumentRequest, + dict, + ], +) +def test_get_document_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/documents/sample3/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_document(request) + + # Establish that the response is the type that we expect. 
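+    # The name surviving the MessageToJson round trip above confirms the REST
+    # transport parsed the mocked body back into a document.Document.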
+ assert isinstance(response, document.Document) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.GetDocumentRequest.pb(firestore.GetDocumentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = document.Document.to_json(document.Document()) + req.return_value.content = return_value + + request = firestore.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_documents_rest_bad_request(request_type=firestore.ListDocumentsRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/documents/sample3/sample4", + "collection_id": "sample5", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_documents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/documents/sample3/sample4", + "collection_id": "sample5", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_documents(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_documents_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_list_documents" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_list_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.ListDocumentsRequest.pb(firestore.ListDocumentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.ListDocumentsResponse.to_json( + firestore.ListDocumentsResponse() + ) + req.return_value.content = return_value + + request = firestore.ListDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.ListDocumentsResponse() + + client.list_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_document_rest_bad_request(request_type=firestore.UpdateDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "document": { + "name": "projects/sample1/databases/sample2/documents/sample3/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.UpdateDocumentRequest, + dict, + ], +) +def test_update_document_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "document": { + "name": "projects/sample1/databases/sample2/documents/sample3/sample4" + } + } + request_init["document"] = { + "name": "projects/sample1/databases/sample2/documents/sample3/sample4", + "fields": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore.UpdateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if 
field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gf_document.Document( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gf_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gf_document.Document) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_document_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_update_document" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_update_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.UpdateDocumentRequest.pb( + firestore.UpdateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gf_document.Document.to_json(gf_document.Document()) + req.return_value.content = return_value + + request = firestore.UpdateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gf_document.Document() + + client.update_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_document_rest_bad_request(request_type=firestore.DeleteDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/documents/sample3/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
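+    # The pattern below is shared by the *_bad_request tests: a 400 status and
+    # an empty JSON body make the transport raise BadRequest, and .headers is
+    # stubbed because the transport reads response headers along the way.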
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/databases/sample2/documents/sample3/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_document(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_delete_document" + ) as pre: + pre.assert_not_called() + pb_message = firestore.DeleteDocumentRequest.pb( + firestore.DeleteDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = firestore.DeleteDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_batch_get_documents_rest_bad_request( + request_type=firestore.BatchGetDocumentsRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_get_documents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.BatchGetDocumentsRequest, + dict, + ], +) +def test_batch_get_documents_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore.BatchGetDocumentsResponse( + transaction=b"transaction_blob", + missing="missing_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.BatchGetDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_get_documents(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. 
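+    # BatchGetDocuments is a server-streaming RPC; over REST the stream
+    # arrives as a JSON array (hence the "[{}]" wrapping above) consumed
+    # via iter_content, so the call yields an iterable of responses
+    # rather than a single message. RunQuery and RunAggregationQuery
+    # below follow the same pattern.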
+ assert isinstance(response, firestore.BatchGetDocumentsResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_get_documents_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_batch_get_documents" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_batch_get_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.BatchGetDocumentsRequest.pb( + firestore.BatchGetDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.BatchGetDocumentsResponse.to_json( + firestore.BatchGetDocumentsResponse() + ) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = firestore.BatchGetDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.BatchGetDocumentsResponse() + + client.batch_get_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_begin_transaction_rest_bad_request( + request_type=firestore.BeginTransactionRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.begin_transaction(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.BeginTransactionRequest, + dict, + ], +) +def test_begin_transaction_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
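+        # BeginTransaction is unary, so the mocked payload is a single
+        # JSON-encoded message exposed via response.content (contrast
+        # with the streaming tests above, which use iter_content).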
+ return_value = firestore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.begin_transaction(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.BeginTransactionResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_begin_transaction_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_begin_transaction" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_begin_transaction" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.BeginTransactionRequest.pb( + firestore.BeginTransactionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.BeginTransactionResponse.to_json( + firestore.BeginTransactionResponse() + ) + req.return_value.content = return_value + + request = firestore.BeginTransactionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.BeginTransactionResponse() + + client.begin_transaction( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_commit_rest_bad_request(request_type=firestore.CommitRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.commit(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.CommitRequest, + dict, + ], +) +def test_commit_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore.CommitResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.commit(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.CommitResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_commit_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_commit" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_commit" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.CommitRequest.pb(firestore.CommitRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.CommitResponse.to_json(firestore.CommitResponse()) + req.return_value.content = return_value + + request = firestore.CommitRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.CommitResponse() + + client.commit( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rollback_rest_bad_request(request_type=firestore.RollbackRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"database": 
"projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.rollback(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.RollbackRequest, + dict, + ], +) +def test_rollback_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.rollback(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_rollback" + ) as pre: + pre.assert_not_called() + pb_message = firestore.RollbackRequest.pb(firestore.RollbackRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = firestore.RollbackRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.rollback( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_run_query_rest_bad_request(request_type=firestore.RunQueryRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.run_query(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.RunQueryRequest, + dict, + ], +) +def test_run_query_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore.RunQueryResponse( + transaction=b"transaction_blob", + skipped_results=1633, + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.run_query(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore.RunQueryResponse) + assert response.transaction == b"transaction_blob" + assert response.skipped_results == 1633 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_query_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_run_query" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_run_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.RunQueryRequest.pb(firestore.RunQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.RunQueryResponse.to_json(firestore.RunQueryResponse()) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = firestore.RunQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.RunQueryResponse() + + client.run_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_aggregation_query_rest_bad_request( + request_type=firestore.RunAggregationQueryRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.run_aggregation_query(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.RunAggregationQueryRequest, + dict, + ], +) +def test_run_aggregation_query_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
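+        # Like RunQuery, aggregation results stream back over REST, so
+        # the payload below is wrapped in a JSON array as well.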
+ return_value = firestore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.run_aggregation_query(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.RunAggregationQueryResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_aggregation_query_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_run_aggregation_query" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_run_aggregation_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.RunAggregationQueryRequest.pb( + firestore.RunAggregationQueryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.RunAggregationQueryResponse.to_json( + firestore.RunAggregationQueryResponse() + ) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = firestore.RunAggregationQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.RunAggregationQueryResponse() + + client.run_aggregation_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_partition_query_rest_bad_request(request_type=firestore.PartitionQueryRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.partition_query(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.PartitionQueryRequest, + dict, + ], +) +def test_partition_query_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore.PartitionQueryResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.PartitionQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.partition_query(request) + + # Establish that the response is the type that we expect. 
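+    # PartitionQuery is paginated: the client wraps the raw response in a
+    # pager, which forwards fields such as next_page_token from the
+    # underlying PartitionQueryResponse. ListCollectionIds below is
+    # wrapped in a pager the same way.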
+ assert isinstance(response, pagers.PartitionQueryPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_partition_query_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_partition_query" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_partition_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.PartitionQueryRequest.pb( + firestore.PartitionQueryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.PartitionQueryResponse.to_json( + firestore.PartitionQueryResponse() + ) + req.return_value.content = return_value + + request = firestore.PartitionQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.PartitionQueryResponse() + + client.partition_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_write_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + client.write({}) + assert "Method Write is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_listen_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + client.listen({}) + assert "Method Listen is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_list_collection_ids_rest_bad_request( + request_type=firestore.ListCollectionIdsRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_collection_ids(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.ListCollectionIdsRequest, + dict, + ], +) +def test_list_collection_ids_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/databases/sample2/documents"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore.ListCollectionIdsResponse( + collection_ids=["collection_ids_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.ListCollectionIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_collection_ids(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListCollectionIdsPager) + assert response.collection_ids == ["collection_ids_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_collection_ids_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_list_collection_ids" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_list_collection_ids" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.ListCollectionIdsRequest.pb( + firestore.ListCollectionIdsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.ListCollectionIdsResponse.to_json( + firestore.ListCollectionIdsResponse() + ) + req.return_value.content = return_value + + request = firestore.ListCollectionIdsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.ListCollectionIdsResponse() + + client.list_collection_ids( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_write_rest_bad_request(request_type=firestore.BatchWriteRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_write(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.BatchWriteRequest, + dict, + ], +) +def test_batch_write_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
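+        # BatchWrite is unary (and, unlike Commit, non-atomic), so an
+        # empty BatchWriteResponse is enough for the type check below.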
+ return_value = firestore.BatchWriteResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_write(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.BatchWriteResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_write_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_batch_write" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_batch_write" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.BatchWriteRequest.pb(firestore.BatchWriteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.BatchWriteResponse.to_json( + firestore.BatchWriteResponse() + ) + req.return_value.content = return_value + + request = firestore.BatchWriteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.BatchWriteResponse() + + client.batch_write( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_document_rest_bad_request(request_type=firestore.CreateDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/documents/sample3", + "collection_id": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/databases/sample2/documents/sample3", + "collection_id": "sample4", + } + request_init["document"] = { + "name": "name_value", + "fields": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_document_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_create_document" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_create_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.CreateDocumentRequest.pb( + firestore.CreateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = document.Document.to_json(document.Document()) + req.return_value.content = return_value + + request = firestore.CreateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.create_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/databases/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
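+    # The *_operation tests below exercise the google.longrunning
+    # operations mixin rather than Firestore-specific RPCs; note that
+    # they build real requests Response/Request objects instead of
+    # bare mocks.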
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/databases/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
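+    # Like CancelOperation, DeleteOperation returns google.protobuf.Empty,
+    # which the client surfaces as None.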
+ assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/databases/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/databases/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_document_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + client.get_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.GetDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_documents_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + client.list_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ListDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_document_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + client.update_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.UpdateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_document_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + client.delete_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.DeleteDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_batch_get_documents_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), "__call__" + ) as call: + client.batch_get_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BatchGetDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_begin_transaction_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_query_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + client.run_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RunQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_aggregation_query_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
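+    # As in the other empty-call tests, patching the transport method's
+    # __call__ means no HTTP request is ever issued; the test only checks
+    # that a default request message reaches the stub.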
+ with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + client.run_aggregation_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.RunAggregationQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_query_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.partition_query), "__call__") as call: + client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_collection_ids_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), "__call__" + ) as call: + client.list_collection_ids(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ListCollectionIdsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_write_empty_call_rest(): + client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + client.batch_write(request=None) -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreGrpcTransport, - transports.FirestoreGrpcAsyncIOTransport, - transports.FirestoreRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.BatchWriteRequest() + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = FirestoreClient.get_transport_class(transport_name)( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_document_empty_call_rest(): + client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.create_document), "__call__") as call: + client.create_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.CreateDocumentRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -11313,375 +11882,129 @@ def test_firestore_transport_channel_mtls_with_adc(transport_class): def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = FirestoreClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = FirestoreClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = FirestoreClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = FirestoreClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = FirestoreClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = FirestoreClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = FirestoreClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = FirestoreClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = FirestoreClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = FirestoreClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.FirestoreTransport, "_prep_wrapped_messages" - ) as prep: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.FirestoreTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = FirestoreClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = FirestoreClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = FirestoreClient.common_billing_account_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_billing_account_path(path) + assert expected == actual - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_operation(request) +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = FirestoreClient.common_folder_path(folder) + assert expected == actual - # Establish that the response is the type that we expect. - assert response is None +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = FirestoreClient.common_folder_path(**expected) -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_folder_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = FirestoreClient.common_organization_path(organization) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = FirestoreClient.common_organization_path(**expected) -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_organization_path(path) + assert expected == actual - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = FirestoreClient.common_project_path(project) + assert expected == actual - response = client.get_operation(request) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = FirestoreClient.common_project_path(**expected) + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_project_path(path) + assert expected == actual -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2"}, request +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = FirestoreClient.common_location_path(project, location) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = FirestoreClient.common_location_path(**expected) -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_location_path(path) + assert expected == actual - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.list_operations(request) + with mock.patch.object( + transports.FirestoreTransport, "_prep_wrapped_messages" + ) as prep: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + with mock.patch.object( + transports.FirestoreTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = FirestoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) def test_delete_operation(transport: str = "grpc"): @@ -11711,7 +12034,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11764,7 +12087,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11809,7 +12132,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -11850,7 +12173,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11903,7 +12226,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11948,7 +12271,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -11989,7 +12312,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -12044,7 +12367,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12091,7 +12414,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -12134,7 +12457,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -12189,7 +12512,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12236,7 +12559,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -12252,22 +12575,41 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): From fbd6ab2704a2f02542bef291e44ffe783130c061 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 24 Feb 2025 15:35:37 -0800 Subject: [PATCH 17/37] fix: client-side path validation for batch.update (#1021) --- google/cloud/firestore_v1/field_path.py | 6 ++-- tests/system/test_system.py | 40 +++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 4 deletions(-) diff --git a/google/cloud/firestore_v1/field_path.py b/google/cloud/firestore_v1/field_path.py index c3383cbb8c..048eb64d08 100644 --- a/google/cloud/firestore_v1/field_path.py +++ b/google/cloud/firestore_v1/field_path.py @@ -31,7 +31,7 @@ _ESCAPED_BACKTICK = _BACKSLASH + _BACKTICK _SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$") -_LEADING_ALPHA_INVALID = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*[^_a-zA-Z0-9]") +_LEADING_ALPHA_INVALID = re.compile(r"^[_a-zA-Z][_a-zA-Z0-9]*[~*/\[\]]") PATH_ELEMENT_TOKENS = [ ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"), # unquoted elements ("QUOTED", r"`(?:\\`|[^`])*?`"), # quoted elements, unquoted @@ -311,9 +311,7 @@ def from_string(cls, path_string: str): raise ValueError("Empty element") if _LEADING_ALPHA_INVALID.match(element): raise ValueError( - "Non-alphanum char in element with leading alpha: {}".format( - element - ) + "Invalid char in element with leading alpha: {}".format(element) ) return FieldPath(*elements) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 386bc5deb2..ba72d6ca02 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -3371,6 +3371,28 @@ def in_transaction(transaction): assert inner_fn_ran is True +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_transaction_w_uuid(client, cleanup, database): + """ + https://github.com/googleapis/python-firestore/issues/1012 + """ + collection_id = "uuid_collection" + UNIQUE_RESOURCE_ID + doc_ref = client.document(collection_id, "doc") + cleanup(doc_ref.delete) + key = 
"b7992822-eacb-40be-8af6-559b9e2fb0b7" + doc_ref.create({key: "I'm a UUID!"}) + + @firestore.transactional + def update_doc(tx, doc_ref, key, value): + tx.update(doc_ref, {key: value}) + + expected = "UPDATED VALUE" + update_doc(client.transaction(), doc_ref, key, expected) + # read updated doc + snapshot = doc_ref.get() + assert snapshot.to_dict()[key] == expected + + @pytest.mark.skipif( FIRESTORE_EMULATOR, reason="Query profile not supported in emulator." ) @@ -3426,6 +3448,24 @@ def in_transaction(transaction): assert inner_fn_ran is True +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_update_w_uuid(client, cleanup, database): + """ + https://github.com/googleapis/python-firestore/issues/1012 + """ + collection_id = "uuid_collection" + UNIQUE_RESOURCE_ID + doc_ref = client.document(collection_id, "doc") + cleanup(doc_ref.delete) + key = "b7992822-eacb-40be-8af6-559b9e2fb0b7" + doc_ref.create({key: "I'm a UUID!"}) + + expected = "UPDATED VALUE" + doc_ref.update({key: expected}) + # read updated doc + snapshot = doc_ref.get() + assert snapshot.to_dict()[key] == expected + + @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_query_in_transaction_with_read_time(client, cleanup, database): """ From 33c134bf9074605e364761923ca452eb69b09cc8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Feb 2025 10:01:15 -0800 Subject: [PATCH 18/37] fix: Watch thread deadlock on exit (#1014) * fix: fix thread cleanup when destroying Watch instance * added test for on_snapshot while closing --- google/cloud/firestore_v1/watch.py | 3 +++ tests/unit/v1/test_watch.py | 9 +++++++++ 2 files changed, 12 insertions(+) diff --git a/google/cloud/firestore_v1/watch.py b/google/cloud/firestore_v1/watch.py index 62b53ef4a9..79933aecae 100644 --- a/google/cloud/firestore_v1/watch.py +++ b/google/cloud/firestore_v1/watch.py @@ -440,6 +440,9 @@ def on_snapshot(self, proto): proto(`google.cloud.firestore_v1.types.ListenResponse`): Callback method that receives a object to """ + if self._closing.locked(): + # don't process on_snapshot responses while spinning down, to prevent deadlock + return if proto is None: self.close() return diff --git a/tests/unit/v1/test_watch.py b/tests/unit/v1/test_watch.py index 094248e933..6d8c12abc0 100644 --- a/tests/unit/v1/test_watch.py +++ b/tests/unit/v1/test_watch.py @@ -400,6 +400,15 @@ def test_watch_on_snapshot_target_w_none(): assert inst._rpc is None +def test_watch_on_snapshot_while_closing(): + inst = _make_watch() + inst.close = mock.Mock() + with inst._closing: + inst.on_snapshot(mock.Mock()) + # close should not be called again when already closing + inst.close.assert_not_called() + + def test_watch_on_snapshot_target_no_change_no_target_ids_not_current(): inst = _make_watch() proto = _make_listen_response() From 4d24a958bf77aec3df8777bf91a6d647fee8ba99 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Feb 2025 10:52:48 -0800 Subject: [PATCH 19/37] chore(python): fix typo in README (#1015) Source-Link: https://github.com/googleapis/synthtool/commit/93e1685311a3940e713fd00820aa9937d496f544 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:631b4a35a4f9dd5e97740a97c4c117646eb85b35e103844dc49d152bd18694cd Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 243 
++++++++++++++++++++++++++- .kokoro/publish-docs.sh | 4 - README.rst | 4 +- 5 files changed, 239 insertions(+), 17 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 10cf433a8b..7a8100470c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8ff1efe878e18bd82a0fb7b70bb86f77e7ab6901fed394440b6135db0ba8d84a -# created: 2025-01-09T12:01:16.422459506Z + digest: sha256:631b4a35a4f9dd5e97740a97c4c117646eb85b35e103844dc49d152bd18694cd +# created: 2025-02-05T14:40:56.685429494Z diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in index 816817c672..586bd07037 100644 --- a/.kokoro/docker/docs/requirements.in +++ b/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt index f99a5c4aac..a9360a25b7 100644 --- a/.kokoro/docker/docs/requirements.txt +++ b/.kokoro/docker/docs/requirements.txt @@ -2,16 +2,124 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.5.2 \ - --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ - --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + 
--hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + 
--hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via nox + # via + # gcp-docuploader + # nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 @@ -20,10 +128,78 @@ 
filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + 
--hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in + # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,6 +208,51 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via 
google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader tomli==2.2.1 \ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ @@ -66,7 +287,11 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.28.0 \ - --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ - --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 233205d580..4ed4aaf134 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - # build docs nox -s docs diff --git a/README.rst b/README.rst index e2106834ed..4b1cdf7d90 100644 --- a/README.rst +++ b/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Cloud Firestore API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Cloud Firestore API.: https://cloud.google.com/firestore -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ From 029fcfb081e8647081ee098cbfe0721b4493cd3e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Feb 2025 11:38:32 -0800 Subject: [PATCH 20/37] chore(python): conditionally load credentials in .kokoro/build.sh (#1022) Source-Link: https://github.com/googleapis/synthtool/commit/aa69fb74717c8f4c58c60f8cc101d3f4b2c07b09 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/build.sh | 20 ++++++--- README.rst | 89 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 105 insertions(+), 8 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7a8100470c..3f7634f25f 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:631b4a35a4f9dd5e97740a97c4c117646eb85b35e103844dc49d152bd18694cd -# created: 2025-02-05T14:40:56.685429494Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index cfd7fc4bcb..d84680bd8d 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-firestore" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -31,10 +33,16 @@ env | grep KOKORO export FIRESTORE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/firebase-credentials.json # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -49,7 +57,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/README.rst b/README.rst index 4b1cdf7d90..0171769aa9 100644 --- a/README.rst +++ b/README.rst @@ -106,3 +106,92 @@ Next Steps .. _Cloud Firestore API Product documentation: https://cloud.google.com/firestore .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. 
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+Environment-Based Examples
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+Code-Based Examples
+^^^^^^^^^^^^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud.translate_v3 import translate
+
+   base_logger = logging.getLogger("google")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud.translate_v3 import translate
+
+   base_logger = logging.getLogger("google.cloud.library_v1")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
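
The propagation note under "Logging details" above is easy to miss in practice. What follows is a minimal sketch, not part of the patch itself, combining the code-based handler configuration with the explicit opt-in to root-logger propagation; the `google.cloud.firestore_v1` scope is an illustrative choice of module, not one mandated by this change.

.. code-block:: python

   import logging

   # Code-based configuration: attach a stream handler to a Firestore-scoped
   # logger so DEBUG-level RPC events from that module are emitted.
   scoped_logger = logging.getLogger("google.cloud.firestore_v1")
   scoped_logger.addHandler(logging.StreamHandler())
   scoped_logger.setLevel(logging.DEBUG)

   # By default, events stop at the "google"-level logger; propagation to the
   # root logger only happens if it is explicitly requested, as noted above.
   logging.getLogger("google").propagate = True
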
From e6bc31d80b4a01a32aacae9e142db52687086993 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Feb 2025 16:30:50 -0800 Subject: [PATCH 21/37] chore: add logging section to readme (#1018) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: add logging section to readme Source-Link: https://github.com/googleapis/synthtool/commit/d1011bc72b89a605d8e72c1f45c1db119fd56d74 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fdc038572b896f739f95cc90e62f16c06e4f2ef0ef3bea343a358331862ad0f0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/build.sh | 20 ++++++-------------- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3f7634f25f..426c977fbd 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:fdc038572b896f739f95cc90e62f16c06e4f2ef0ef3bea343a358331862ad0f0 +# created: 2025-02-13T21:06:55.521673457Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index d84680bd8d..cfd7fc4bcb 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,13 +15,11 @@ set -eo pipefail -CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") - if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") + PROJECT_ROOT="github/python-firestore" fi -pushd "${PROJECT_ROOT}" +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -33,16 +31,10 @@ env | grep KOKORO export FIRESTORE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/firebase-credentials.json # Setup service account credentials. -if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] -then - export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -fi +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. -if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] -then - export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -fi +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -57,7 +49,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi From d8ae170bf1c7269f8330a278618998b85af1740f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 26 Feb 2025 11:07:06 -0800 Subject: [PATCH 22/37] chore: pull up gapic updates (#1016) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add REST Interceptors which support reading metadata feat: Add support for reading selective GAPIC generation methods from service YAML chore: Update gapic-generator-python to v1.22.0 PiperOrigin-RevId: 724026024 Source-Link: https://github.com/googleapis/googleapis/commit/ad9963857109513e77eed153a66264481789109f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e291c4dd1d670eda19998de76f967e1603a48993 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTI5MWM0ZGQxZDY3MGVkYTE5OTk4ZGU3NmY5NjdlMTYwM2E0ODk5MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: expose the Firestore.executePipeline API to the preview branch docs: minor documentation updates to `StructuredQuery` docs: minor documentation changes for `distance_threshold` PiperOrigin-RevId: 731306872 Source-Link: https://github.com/googleapis/googleapis/commit/b6d5ae84d5070e249319dc898f81becf86546414 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4cb9048871bf5906b390e8ea3d7b9f1ef6ac19d6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGNiOTA0ODg3MWJmNTkwNmIzOTBlOGVhM2Q3YjlmMWVmNmFjMTlkNiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .../services/firestore_admin/client.py | 73 +- .../firestore_admin/transports/rest.py | 656 ++++++++++++++++-- .../firestore_v1/services/firestore/client.py | 73 +- .../services/firestore/transports/rest.py | 386 ++++++++++- google/cloud/firestore_v1/types/query.py | 6 +- .../test_firestore_admin.py | 173 +++++ .../unit/gapic/firestore_v1/test_firestore.py | 120 ++++ 7 files changed, 1383 insertions(+), 104 deletions(-) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index 9791ef7c69..fb91e547ac 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -679,6 +681,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3761,16 +3790,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3816,16 +3849,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py index ce9048bc86..cc62029d0e 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py @@ -289,12 +289,35 @@ def post_bulk_delete_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for bulk_delete_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_delete_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_bulk_delete_documents` interceptor runs + before the `post_bulk_delete_documents_with_metadata` interceptor. """ return response + def post_bulk_delete_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_delete_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_bulk_delete_documents_with_metadata` + interceptor in new development instead of the `post_bulk_delete_documents` interceptor. + When both interceptors are used, this `post_bulk_delete_documents_with_metadata` interceptor runs after the + `post_bulk_delete_documents` interceptor. 
The (possibly modified) response returned by + `post_bulk_delete_documents` will be passed to + `post_bulk_delete_documents_with_metadata`. + """ + return response, metadata + def pre_create_backup_schedule( self, request: firestore_admin.CreateBackupScheduleRequest, @@ -315,12 +338,35 @@ def post_create_backup_schedule( ) -> schedule.BackupSchedule: """Post-rpc interceptor for create_backup_schedule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_backup_schedule` interceptor runs + before the `post_create_backup_schedule_with_metadata` interceptor. """ return response + def post_create_backup_schedule_with_metadata( + self, + response: schedule.BackupSchedule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_create_backup_schedule_with_metadata` + interceptor in new development instead of the `post_create_backup_schedule` interceptor. + When both interceptors are used, this `post_create_backup_schedule_with_metadata` interceptor runs after the + `post_create_backup_schedule` interceptor. The (possibly modified) response returned by + `post_create_backup_schedule` will be passed to + `post_create_backup_schedule_with_metadata`. + """ + return response, metadata + def pre_create_database( self, request: firestore_admin.CreateDatabaseRequest, @@ -340,12 +386,35 @@ def post_create_database( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_database` interceptor runs + before the `post_create_database_with_metadata` interceptor. """ return response + def post_create_database_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_create_database_with_metadata` + interceptor in new development instead of the `post_create_database` interceptor. + When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the + `post_create_database` interceptor. The (possibly modified) response returned by + `post_create_database` will be passed to + `post_create_database_with_metadata`. 
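+
+        For example, assuming the ``FirestoreAdminRestInterceptor`` base
+        class defined in this module, a subclass might log the response
+        headers before returning (the subclass name and logging call are
+        illustrative only, and ``logging`` is assumed imported)::
+
+            class LoggingInterceptor(FirestoreAdminRestInterceptor):
+                def post_create_database_with_metadata(self, response, metadata):
+                    # metadata is the sequence of (key, value) response headers
+                    for key, value in metadata:
+                        logging.debug("create_database header %s=%s", key, value)
+                    return response, metadata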
+ """ + return response, metadata + def pre_create_index( self, request: firestore_admin.CreateIndexRequest, @@ -365,12 +434,35 @@ def post_create_index( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_index_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_index` interceptor runs + before the `post_create_index_with_metadata` interceptor. """ return response + def post_create_index_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_create_index_with_metadata` + interceptor in new development instead of the `post_create_index` interceptor. + When both interceptors are used, this `post_create_index_with_metadata` interceptor runs after the + `post_create_index` interceptor. The (possibly modified) response returned by + `post_create_index` will be passed to + `post_create_index_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: firestore_admin.DeleteBackupRequest, @@ -419,12 +511,35 @@ def post_delete_database( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_database` interceptor runs + before the `post_delete_database_with_metadata` interceptor. """ return response + def post_delete_database_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_database_with_metadata` + interceptor in new development instead of the `post_delete_database` interceptor. + When both interceptors are used, this `post_delete_database_with_metadata` interceptor runs after the + `post_delete_database` interceptor. The (possibly modified) response returned by + `post_delete_database` will be passed to + `post_delete_database_with_metadata`. + """ + return response, metadata + def pre_delete_index( self, request: firestore_admin.DeleteIndexRequest, @@ -458,12 +573,35 @@ def post_export_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_documents_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_export_documents` interceptor runs + before the `post_export_documents_with_metadata` interceptor. """ return response + def post_export_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_export_documents_with_metadata` + interceptor in new development instead of the `post_export_documents` interceptor. + When both interceptors are used, this `post_export_documents_with_metadata` interceptor runs after the + `post_export_documents` interceptor. The (possibly modified) response returned by + `post_export_documents` will be passed to + `post_export_documents_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: firestore_admin.GetBackupRequest, @@ -481,12 +619,33 @@ def pre_get_backup( def post_get_backup(self, response: backup.Backup) -> backup.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. """ return response + def post_get_backup_with_metadata( + self, response: backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_backup_schedule( self, request: firestore_admin.GetBackupScheduleRequest, @@ -507,12 +666,35 @@ def post_get_backup_schedule( ) -> schedule.BackupSchedule: """Post-rpc interceptor for get_backup_schedule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_backup_schedule` interceptor runs + before the `post_get_backup_schedule_with_metadata` interceptor. 
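+
+        Migrating an existing override is mechanical: the ``_with_metadata``
+        variant adds a ``metadata`` parameter and returns
+        ``response, metadata`` instead of ``response``.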
""" return response + def post_get_backup_schedule_with_metadata( + self, + response: schedule.BackupSchedule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_schedule_with_metadata` + interceptor in new development instead of the `post_get_backup_schedule` interceptor. + When both interceptors are used, this `post_get_backup_schedule_with_metadata` interceptor runs after the + `post_get_backup_schedule` interceptor. The (possibly modified) response returned by + `post_get_backup_schedule` will be passed to + `post_get_backup_schedule_with_metadata`. + """ + return response, metadata + def pre_get_database( self, request: firestore_admin.GetDatabaseRequest, @@ -530,12 +712,35 @@ def pre_get_database( def post_get_database(self, response: database.Database) -> database.Database: """Post-rpc interceptor for get_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_database` interceptor runs + before the `post_get_database_with_metadata` interceptor. """ return response + def post_get_database_with_metadata( + self, + response: database.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[database.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_get_database_with_metadata` + interceptor in new development instead of the `post_get_database` interceptor. + When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the + `post_get_database` interceptor. The (possibly modified) response returned by + `post_get_database` will be passed to + `post_get_database_with_metadata`. + """ + return response, metadata + def pre_get_field( self, request: firestore_admin.GetFieldRequest, @@ -553,12 +758,33 @@ def pre_get_field( def post_get_field(self, response: field.Field) -> field.Field: """Post-rpc interceptor for get_field - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_field_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_field` interceptor runs + before the `post_get_field_with_metadata` interceptor. """ return response + def post_get_field_with_metadata( + self, response: field.Field, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[field.Field, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_field + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_get_field_with_metadata` + interceptor in new development instead of the `post_get_field` interceptor. + When both interceptors are used, this `post_get_field_with_metadata` interceptor runs after the + `post_get_field` interceptor. The (possibly modified) response returned by + `post_get_field` will be passed to + `post_get_field_with_metadata`. + """ + return response, metadata + def pre_get_index( self, request: firestore_admin.GetIndexRequest, @@ -576,12 +802,33 @@ def pre_get_index( def post_get_index(self, response: index.Index) -> index.Index: """Post-rpc interceptor for get_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_index_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_index` interceptor runs + before the `post_get_index_with_metadata` interceptor. """ return response + def post_get_index_with_metadata( + self, response: index.Index, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[index.Index, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_get_index_with_metadata` + interceptor in new development instead of the `post_get_index` interceptor. + When both interceptors are used, this `post_get_index_with_metadata` interceptor runs after the + `post_get_index` interceptor. The (possibly modified) response returned by + `post_get_index` will be passed to + `post_get_index_with_metadata`. + """ + return response, metadata + def pre_import_documents( self, request: firestore_admin.ImportDocumentsRequest, @@ -601,12 +848,35 @@ def post_import_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_import_documents` interceptor runs + before the `post_import_documents_with_metadata` interceptor. """ return response + def post_import_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_import_documents_with_metadata` + interceptor in new development instead of the `post_import_documents` interceptor. + When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the + `post_import_documents` interceptor. The (possibly modified) response returned by + `post_import_documents` will be passed to + `post_import_documents_with_metadata`. 
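+
+        Note that the transport itself discards the metadata returned from
+        this method (the call sites read ``resp, _ = ...``), so overriding
+        it is primarily useful for inspecting headers or replacing the
+        response.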
+ """ + return response, metadata + def pre_list_backups( self, request: firestore_admin.ListBackupsRequest, @@ -626,12 +896,37 @@ def post_list_backups( ) -> firestore_admin.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: firestore_admin.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_backup_schedules( self, request: firestore_admin.ListBackupSchedulesRequest, @@ -652,12 +947,38 @@ def post_list_backup_schedules( ) -> firestore_admin.ListBackupSchedulesResponse: """Post-rpc interceptor for list_backup_schedules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backup_schedules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_backup_schedules` interceptor runs + before the `post_list_backup_schedules_with_metadata` interceptor. """ return response + def post_list_backup_schedules_with_metadata( + self, + response: firestore_admin.ListBackupSchedulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListBackupSchedulesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_backup_schedules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backup_schedules_with_metadata` + interceptor in new development instead of the `post_list_backup_schedules` interceptor. + When both interceptors are used, this `post_list_backup_schedules_with_metadata` interceptor runs after the + `post_list_backup_schedules` interceptor. The (possibly modified) response returned by + `post_list_backup_schedules` will be passed to + `post_list_backup_schedules_with_metadata`. 
+ """ + return response, metadata + def pre_list_databases( self, request: firestore_admin.ListDatabasesRequest, @@ -677,12 +998,37 @@ def post_list_databases( ) -> firestore_admin.ListDatabasesResponse: """Post-rpc interceptor for list_databases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. """ return response + def post_list_databases_with_metadata( + self, + response: firestore_admin.ListDatabasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. + """ + return response, metadata + def pre_list_fields( self, request: firestore_admin.ListFieldsRequest, @@ -702,12 +1048,37 @@ def post_list_fields( ) -> firestore_admin.ListFieldsResponse: """Post-rpc interceptor for list_fields - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_fields_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_fields` interceptor runs + before the `post_list_fields_with_metadata` interceptor. """ return response + def post_list_fields_with_metadata( + self, + response: firestore_admin.ListFieldsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListFieldsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_fields + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_list_fields_with_metadata` + interceptor in new development instead of the `post_list_fields` interceptor. + When both interceptors are used, this `post_list_fields_with_metadata` interceptor runs after the + `post_list_fields` interceptor. The (possibly modified) response returned by + `post_list_fields` will be passed to + `post_list_fields_with_metadata`. + """ + return response, metadata + def pre_list_indexes( self, request: firestore_admin.ListIndexesRequest, @@ -727,12 +1098,37 @@ def post_list_indexes( ) -> firestore_admin.ListIndexesResponse: """Post-rpc interceptor for list_indexes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_indexes_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_indexes` interceptor runs + before the `post_list_indexes_with_metadata` interceptor. """ return response + def post_list_indexes_with_metadata( + self, + response: firestore_admin.ListIndexesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore_admin.ListIndexesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_indexes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_list_indexes_with_metadata` + interceptor in new development instead of the `post_list_indexes` interceptor. + When both interceptors are used, this `post_list_indexes_with_metadata` interceptor runs after the + `post_list_indexes` interceptor. The (possibly modified) response returned by + `post_list_indexes` will be passed to + `post_list_indexes_with_metadata`. + """ + return response, metadata + def pre_restore_database( self, request: firestore_admin.RestoreDatabaseRequest, @@ -752,12 +1148,35 @@ def post_restore_database( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_restore_database` interceptor runs + before the `post_restore_database_with_metadata` interceptor. """ return response + def post_restore_database_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_restore_database_with_metadata` + interceptor in new development instead of the `post_restore_database` interceptor. + When both interceptors are used, this `post_restore_database_with_metadata` interceptor runs after the + `post_restore_database` interceptor. The (possibly modified) response returned by + `post_restore_database` will be passed to + `post_restore_database_with_metadata`. + """ + return response, metadata + def pre_update_backup_schedule( self, request: firestore_admin.UpdateBackupScheduleRequest, @@ -778,12 +1197,35 @@ def post_update_backup_schedule( ) -> schedule.BackupSchedule: """Post-rpc interceptor for update_backup_schedule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_backup_schedule` interceptor runs + before the `post_update_backup_schedule_with_metadata` interceptor. 
""" return response + def post_update_backup_schedule_with_metadata( + self, + response: schedule.BackupSchedule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_update_backup_schedule_with_metadata` + interceptor in new development instead of the `post_update_backup_schedule` interceptor. + When both interceptors are used, this `post_update_backup_schedule_with_metadata` interceptor runs after the + `post_update_backup_schedule` interceptor. The (possibly modified) response returned by + `post_update_backup_schedule` will be passed to + `post_update_backup_schedule_with_metadata`. + """ + return response, metadata + def pre_update_database( self, request: firestore_admin.UpdateDatabaseRequest, @@ -803,12 +1245,35 @@ def post_update_database( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_database` interceptor runs + before the `post_update_database_with_metadata` interceptor. """ return response + def post_update_database_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_update_database_with_metadata` + interceptor in new development instead of the `post_update_database` interceptor. + When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the + `post_update_database` interceptor. The (possibly modified) response returned by + `post_update_database` will be passed to + `post_update_database_with_metadata`. + """ + return response, metadata + def pre_update_field( self, request: firestore_admin.UpdateFieldRequest, @@ -828,12 +1293,35 @@ def post_update_field( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_field - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_field_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirestoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_field` interceptor runs + before the `post_update_field_with_metadata` interceptor. 
""" return response + def post_update_field_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_field + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirestoreAdmin server but before it is returned to user code. + + We recommend only using this `post_update_field_with_metadata` + interceptor in new development instead of the `post_update_field` interceptor. + When both interceptors are used, this `post_update_field_with_metadata` interceptor runs after the + `post_update_field` interceptor. The (possibly modified) response returned by + `post_update_field` will be passed to + `post_update_field_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -1237,6 +1725,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_delete_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_delete_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1393,6 +1885,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup_schedule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_schedule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1541,6 +2037,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1689,6 +2189,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_index_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2048,6 +2552,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2305,6 +2813,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2455,6 +2967,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2604,6 +3120,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup_schedule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_schedule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2745,6 +3265,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2895,6 +3419,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_field(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_field_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3043,6 +3571,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_index_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3193,6 +3725,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3336,6 +3872,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3484,6 +4024,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backup_schedules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_schedules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3628,6 +4172,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_databases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_list_databases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3775,6 +4323,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_fields(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_fields_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3920,6 +4472,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_indexes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_indexes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4072,6 +4628,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4228,6 +4788,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup_schedule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_schedule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4376,6 +4940,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4524,6 +5092,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_field(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_field_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py index 2054b14388..bcb759c182 100644 --- a/google/cloud/firestore_v1/services/firestore/client.py +++ b/google/cloud/firestore_v1/services/firestore/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -481,6 +483,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
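+
+        If the error is a 401, 403, or 404 and the transport's credentials
+        expose ``get_cred_info()``, the credential info is JSON-encoded and
+        appended to the error's details; otherwise the error is left
+        unmodified.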
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2348,16 +2377,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2403,16 +2436,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest.py b/google/cloud/firestore_v1/services/firestore/transports/rest.py index 31546b37ea..250a766904 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest.py @@ -208,12 +208,37 @@ def post_batch_get_documents( ) -> rest_streaming.ResponseIterator: """Post-rpc interceptor for batch_get_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_documents` interceptor runs + before the `post_batch_get_documents_with_metadata` interceptor. """ return response + def post_batch_get_documents_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for batch_get_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_batch_get_documents_with_metadata` + interceptor in new development instead of the `post_batch_get_documents` interceptor. + When both interceptors are used, this `post_batch_get_documents_with_metadata` interceptor runs after the + `post_batch_get_documents` interceptor. 
The (possibly modified) response returned by + `post_batch_get_documents` will be passed to + `post_batch_get_documents_with_metadata`. + """ + return response, metadata + def pre_batch_write( self, request: firestore.BatchWriteRequest, @@ -231,12 +256,35 @@ def post_batch_write( ) -> firestore.BatchWriteResponse: """Post-rpc interceptor for batch_write - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_write_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_batch_write` interceptor runs + before the `post_batch_write_with_metadata` interceptor. """ return response + def post_batch_write_with_metadata( + self, + response: firestore.BatchWriteResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.BatchWriteResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_write + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_batch_write_with_metadata` + interceptor in new development instead of the `post_batch_write` interceptor. + When both interceptors are used, this `post_batch_write_with_metadata` interceptor runs after the + `post_batch_write` interceptor. The (possibly modified) response returned by + `post_batch_write` will be passed to + `post_batch_write_with_metadata`. + """ + return response, metadata + def pre_begin_transaction( self, request: firestore.BeginTransactionRequest, @@ -256,12 +304,37 @@ def post_begin_transaction( ) -> firestore.BeginTransactionResponse: """Post-rpc interceptor for begin_transaction - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_begin_transaction_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_begin_transaction` interceptor runs + before the `post_begin_transaction_with_metadata` interceptor. """ return response + def post_begin_transaction_with_metadata( + self, + response: firestore.BeginTransactionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.BeginTransactionResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for begin_transaction + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_begin_transaction_with_metadata` + interceptor in new development instead of the `post_begin_transaction` interceptor. + When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the + `post_begin_transaction` interceptor. The (possibly modified) response returned by + `post_begin_transaction` will be passed to + `post_begin_transaction_with_metadata`. + """ + return response, metadata + def pre_commit( self, request: firestore.CommitRequest, @@ -279,12 +352,35 @@ def post_commit( ) -> firestore.CommitResponse: """Post-rpc interceptor for commit - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_commit_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_commit` interceptor runs + before the `post_commit_with_metadata` interceptor. """ return response + def post_commit_with_metadata( + self, + response: firestore.CommitResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firestore.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for commit + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_commit_with_metadata` + interceptor in new development instead of the `post_commit` interceptor. + When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the + `post_commit` interceptor. The (possibly modified) response returned by + `post_commit` will be passed to + `post_commit_with_metadata`. + """ + return response, metadata + def pre_create_document( self, request: firestore.CreateDocumentRequest, @@ -302,12 +398,35 @@ def pre_create_document( def post_create_document(self, response: document.Document) -> document.Document: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_document( self, request: firestore.DeleteDocumentRequest, @@ -337,12 +456,35 @@ def pre_get_document( def post_get_document(self, response: document.Document) -> document.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. 
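+
+        A minimal migration sketch, assuming the ``FirestoreRestInterceptor``
+        base class defined in this module (the subclass name is
+        illustrative)::
+
+            class MyInterceptor(FirestoreRestInterceptor):
+                def post_get_document_with_metadata(self, response, metadata):
+                    # Same hook point as post_get_document, but the HTTP
+                    # response headers are available as (key, value) pairs.
+                    return response, metadata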
""" return response + def post_get_document_with_metadata( + self, + response: document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_list_collection_ids( self, request: firestore.ListCollectionIdsRequest, @@ -362,12 +504,37 @@ def post_list_collection_ids( ) -> firestore.ListCollectionIdsResponse: """Post-rpc interceptor for list_collection_ids - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_collection_ids_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_list_collection_ids` interceptor runs + before the `post_list_collection_ids_with_metadata` interceptor. """ return response + def post_list_collection_ids_with_metadata( + self, + response: firestore.ListCollectionIdsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.ListCollectionIdsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_collection_ids + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_list_collection_ids_with_metadata` + interceptor in new development instead of the `post_list_collection_ids` interceptor. + When both interceptors are used, this `post_list_collection_ids_with_metadata` interceptor runs after the + `post_list_collection_ids` interceptor. The (possibly modified) response returned by + `post_list_collection_ids` will be passed to + `post_list_collection_ids_with_metadata`. + """ + return response, metadata + def pre_list_documents( self, request: firestore.ListDocumentsRequest, @@ -385,12 +552,37 @@ def post_list_documents( ) -> firestore.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. 
""" return response + def post_list_documents_with_metadata( + self, + response: firestore.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. + """ + return response, metadata + def pre_partition_query( self, request: firestore.PartitionQueryRequest, @@ -410,12 +602,37 @@ def post_partition_query( ) -> firestore.PartitionQueryResponse: """Post-rpc interceptor for partition_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_partition_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_partition_query` interceptor runs + before the `post_partition_query_with_metadata` interceptor. """ return response + def post_partition_query_with_metadata( + self, + response: firestore.PartitionQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.PartitionQueryResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for partition_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_partition_query_with_metadata` + interceptor in new development instead of the `post_partition_query` interceptor. + When both interceptors are used, this `post_partition_query_with_metadata` interceptor runs after the + `post_partition_query` interceptor. The (possibly modified) response returned by + `post_partition_query` will be passed to + `post_partition_query_with_metadata`. + """ + return response, metadata + def pre_rollback( self, request: firestore.RollbackRequest, @@ -447,12 +664,37 @@ def post_run_aggregation_query( ) -> rest_streaming.ResponseIterator: """Post-rpc interceptor for run_aggregation_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_aggregation_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_run_aggregation_query` interceptor runs + before the `post_run_aggregation_query_with_metadata` interceptor. 
""" return response + def post_run_aggregation_query_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for run_aggregation_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_run_aggregation_query_with_metadata` + interceptor in new development instead of the `post_run_aggregation_query` interceptor. + When both interceptors are used, this `post_run_aggregation_query_with_metadata` interceptor runs after the + `post_run_aggregation_query` interceptor. The (possibly modified) response returned by + `post_run_aggregation_query` will be passed to + `post_run_aggregation_query_with_metadata`. + """ + return response, metadata + def pre_run_query( self, request: firestore.RunQueryRequest, @@ -470,12 +712,37 @@ def post_run_query( ) -> rest_streaming.ResponseIterator: """Post-rpc interceptor for run_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_run_query` interceptor runs + before the `post_run_query_with_metadata` interceptor. """ return response + def post_run_query_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for run_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_run_query_with_metadata` + interceptor in new development instead of the `post_run_query` interceptor. + When both interceptors are used, this `post_run_query_with_metadata` interceptor runs after the + `post_run_query` interceptor. The (possibly modified) response returned by + `post_run_query` will be passed to + `post_run_query_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: firestore.UpdateDocumentRequest, @@ -495,12 +762,35 @@ def post_update_document( ) -> gf_document.Document: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firestore server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: gf_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gf_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. 
+ + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -820,6 +1110,10 @@ def __call__( ) resp = self._interceptor.post_batch_get_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_documents_with_metadata( + resp, response_metadata + ) return resp class _BatchWrite(_BaseFirestoreRestTransport._BaseBatchWrite, FirestoreRestStub): @@ -950,6 +1244,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_write(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_write_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1101,6 +1399,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_begin_transaction(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_begin_transaction_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1252,6 +1554,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_commit(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_commit_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1408,6 +1714,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1665,6 +1975,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1816,6 +2130,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_collection_ids(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_collection_ids_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1965,6 +2283,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2136,6 +2458,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_partition_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_partition_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2402,6 +2728,10 @@ def __call__( ) resp = self._interceptor.post_run_aggregation_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_aggregation_query_with_metadata( + resp, response_metadata + ) return resp class _RunQuery(_BaseFirestoreRestTransport._BaseRunQuery, FirestoreRestStub): @@ -2528,6 +2858,10 @@ def __call__( resp = rest_streaming.ResponseIterator(response, firestore.RunQueryResponse) resp = self._interceptor.post_run_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_query_with_metadata( + resp, response_metadata + ) return resp class _UpdateDocument( @@ -2663,6 +2997,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py index 2fda44ebe3..8b21fb6420 100644 --- a/google/cloud/firestore_v1/types/query.py +++ b/google/cloud/firestore_v1/types/query.py @@ -44,6 +44,7 @@ class StructuredQuery(proto.Message): 4. order_by + start_at + end_at 5. offset 6. limit + 7. find_nearest Attributes: select (google.cloud.firestore_v1.types.StructuredQuery.Projection): @@ -554,8 +555,9 @@ class FindNearest(proto.Message): when the vectors are more similar, the comparison is inverted. - For EUCLIDEAN, COSINE: WHERE distance <= distance_threshold - For DOT_PRODUCT: WHERE distance >= distance_threshold + - For EUCLIDEAN, COSINE: WHERE distance <= + distance_threshold + - For DOT_PRODUCT: WHERE distance >= distance_threshold """ class DistanceMeasure(proto.Enum): diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py index 63e24e25f5..85a1119297 100644 --- a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -86,6 +86,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -343,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
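The parametrized tests that follow pin down the new `_add_cred_info_for_auth_errors` behavior: when a call fails with an auth-related status (401, 403, or 404) and the credentials object exposes `get_cred_info`, the client appends the JSON-serialized credential info to the error's details; other status codes, a `None` cred info, and credentials without that hook leave the details untouched. A minimal standalone sketch of the behavior under test (the real helper is a private method on the generated client; the function below is illustrative only):

    import json

    _AUTH_ERROR_CODES = (401, 403, 404)

    def _add_cred_info_for_auth_errors(error, credentials):
        # Only decorate auth-related failures.
        if error.code not in _AUTH_ERROR_CODES:
            return
        # Only credentials that can describe themselves participate.
        get_cred_info = getattr(credentials, "get_cred_info", None)
        if get_cred_info is None:
            return
        cred_info = get_cred_info()
        if cred_info:
            # Matches the tests' expectation: details grows by exactly
            # one JSON string describing the credential.
            error.details.append(json.dumps(cred_info))
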
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FirestoreAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FirestoreAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -15316,10 +15367,13 @@ def test_create_index_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_create_index" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_create_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_create_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.CreateIndexRequest.pb( firestore_admin.CreateIndexRequest() ) @@ -15343,6 +15397,7 @@ def test_create_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_index( request, @@ -15354,6 +15409,7 @@ def test_create_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_indexes_rest_bad_request(request_type=firestore_admin.ListIndexesRequest): @@ -15440,10 +15496,13 @@ def test_list_indexes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_list_indexes" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_indexes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_list_indexes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.ListIndexesRequest.pb( firestore_admin.ListIndexesRequest() ) @@ -15469,6 +15528,10 @@ def test_list_indexes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore_admin.ListIndexesResponse() + post_with_metadata.return_value = ( + firestore_admin.ListIndexesResponse(), + metadata, + ) client.list_indexes( request, @@ -15480,6 +15543,7 @@ def test_list_indexes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
def test_get_index_rest_bad_request(request_type=firestore_admin.GetIndexRequest): @@ -15572,10 +15636,13 @@ def test_get_index_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_get_index" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_get_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.GetIndexRequest.pb( firestore_admin.GetIndexRequest() ) @@ -15599,6 +15666,7 @@ def test_get_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = index.Index() + post_with_metadata.return_value = index.Index(), metadata client.get_index( request, @@ -15610,6 +15678,7 @@ def test_get_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_index_rest_bad_request(request_type=firestore_admin.DeleteIndexRequest): @@ -15807,10 +15876,13 @@ def test_get_field_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_get_field" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_field_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_get_field" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.GetFieldRequest.pb( firestore_admin.GetFieldRequest() ) @@ -15834,6 +15906,7 @@ def test_get_field_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = field.Field() + post_with_metadata.return_value = field.Field(), metadata client.get_field( request, @@ -15845,6 +15918,7 @@ def test_get_field_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_field_rest_bad_request(request_type=firestore_admin.UpdateFieldRequest): @@ -16023,10 +16097,13 @@ def test_update_field_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_update_field" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_update_field_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_update_field" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.UpdateFieldRequest.pb( firestore_admin.UpdateFieldRequest() ) @@ -16050,6 +16127,7 @@ def test_update_field_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_field( request, @@ -16061,6 +16139,7 @@ def test_update_field_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_fields_rest_bad_request(request_type=firestore_admin.ListFieldsRequest): @@ -16147,10 +16226,13 @@ def test_list_fields_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_list_fields" ) as post, mock.patch.object( + 
transports.FirestoreAdminRestInterceptor, "post_list_fields_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_list_fields" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.ListFieldsRequest.pb( firestore_admin.ListFieldsRequest() ) @@ -16176,6 +16258,7 @@ def test_list_fields_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore_admin.ListFieldsResponse() + post_with_metadata.return_value = firestore_admin.ListFieldsResponse(), metadata client.list_fields( request, @@ -16187,6 +16270,7 @@ def test_list_fields_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_documents_rest_bad_request( @@ -16267,10 +16351,13 @@ def test_export_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_export_documents" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_export_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_export_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.ExportDocumentsRequest.pb( firestore_admin.ExportDocumentsRequest() ) @@ -16294,6 +16381,7 @@ def test_export_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_documents( request, @@ -16305,6 +16393,7 @@ def test_export_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_documents_rest_bad_request( @@ -16385,10 +16474,13 @@ def test_import_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_import_documents" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_import_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.ImportDocumentsRequest.pb( firestore_admin.ImportDocumentsRequest() ) @@ -16412,6 +16504,7 @@ def test_import_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_documents( request, @@ -16423,6 +16516,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_bulk_delete_documents_rest_bad_request( @@ -16503,10 +16597,14 @@ def test_bulk_delete_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_bulk_delete_documents" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, + "post_bulk_delete_documents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_bulk_delete_documents" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.BulkDeleteDocumentsRequest.pb( firestore_admin.BulkDeleteDocumentsRequest() ) @@ -16530,6 +16628,7 @@ def test_bulk_delete_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.bulk_delete_documents( request, @@ -16541,6 +16640,7 @@ def test_bulk_delete_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_database_rest_bad_request( @@ -16717,10 +16817,13 @@ def test_create_database_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_create_database" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_create_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_create_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.CreateDatabaseRequest.pb( firestore_admin.CreateDatabaseRequest() ) @@ -16744,6 +16847,7 @@ def test_create_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_database( request, @@ -16755,6 +16859,7 @@ def test_create_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_database_rest_bad_request(request_type=firestore_admin.GetDatabaseRequest): @@ -16866,10 +16971,13 @@ def test_get_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_get_database" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_get_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.GetDatabaseRequest.pb( firestore_admin.GetDatabaseRequest() ) @@ -16893,6 +17001,7 @@ def test_get_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = database.Database() + post_with_metadata.return_value = database.Database(), metadata client.get_database( request, @@ -16904,6 +17013,7 @@ def test_get_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_databases_rest_bad_request( @@ -16988,10 +17098,13 @@ def test_list_databases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_list_databases" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.ListDatabasesRequest.pb( firestore_admin.ListDatabasesRequest() ) @@ -17017,6 +17130,10 @@ def 
test_list_databases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore_admin.ListDatabasesResponse() + post_with_metadata.return_value = ( + firestore_admin.ListDatabasesResponse(), + metadata, + ) client.list_databases( request, @@ -17028,6 +17145,7 @@ def test_list_databases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_database_rest_bad_request( @@ -17204,10 +17322,13 @@ def test_update_database_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_update_database" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_update_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_update_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.UpdateDatabaseRequest.pb( firestore_admin.UpdateDatabaseRequest() ) @@ -17231,6 +17352,7 @@ def test_update_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_database( request, @@ -17242,6 +17364,7 @@ def test_update_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_database_rest_bad_request( @@ -17322,10 +17445,13 @@ def test_delete_database_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_delete_database" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_delete_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_delete_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.DeleteDatabaseRequest.pb( firestore_admin.DeleteDatabaseRequest() ) @@ -17349,6 +17475,7 @@ def test_delete_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_database( request, @@ -17360,6 +17487,7 @@ def test_delete_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=firestore_admin.GetBackupRequest): @@ -17448,10 +17576,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.GetBackupRequest.pb( firestore_admin.GetBackupRequest() ) @@ -17475,6 +17606,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backup.Backup() + post_with_metadata.return_value = backup.Backup(), metadata client.get_backup( 
request, @@ -17486,6 +17618,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=firestore_admin.ListBackupsRequest): @@ -17568,10 +17701,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.ListBackupsRequest.pb( firestore_admin.ListBackupsRequest() ) @@ -17597,6 +17733,10 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore_admin.ListBackupsResponse() + post_with_metadata.return_value = ( + firestore_admin.ListBackupsResponse(), + metadata, + ) client.list_backups( request, @@ -17608,6 +17748,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request( @@ -17797,10 +17938,13 @@ def test_restore_database_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_restore_database" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, "post_restore_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_restore_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.RestoreDatabaseRequest.pb( firestore_admin.RestoreDatabaseRequest() ) @@ -17824,6 +17968,7 @@ def test_restore_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_database( request, @@ -17835,6 +17980,7 @@ def test_restore_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_schedule_rest_bad_request( @@ -17996,10 +18142,14 @@ def test_create_backup_schedule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_create_backup_schedule" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, + "post_create_backup_schedule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_create_backup_schedule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.CreateBackupScheduleRequest.pb( firestore_admin.CreateBackupScheduleRequest() ) @@ -18023,6 +18173,7 @@ def test_create_backup_schedule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schedule.BackupSchedule() + post_with_metadata.return_value = schedule.BackupSchedule(), metadata client.create_backup_schedule( request, @@ -18034,6 +18185,7 @@ def test_create_backup_schedule_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_schedule_rest_bad_request( @@ -18122,10 +18274,14 @@ def test_get_backup_schedule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_get_backup_schedule" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, + "post_get_backup_schedule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_get_backup_schedule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.GetBackupScheduleRequest.pb( firestore_admin.GetBackupScheduleRequest() ) @@ -18149,6 +18305,7 @@ def test_get_backup_schedule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schedule.BackupSchedule() + post_with_metadata.return_value = schedule.BackupSchedule(), metadata client.get_backup_schedule( request, @@ -18160,6 +18317,7 @@ def test_get_backup_schedule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backup_schedules_rest_bad_request( @@ -18241,10 +18399,14 @@ def test_list_backup_schedules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_list_backup_schedules" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, + "post_list_backup_schedules_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_list_backup_schedules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.ListBackupSchedulesRequest.pb( firestore_admin.ListBackupSchedulesRequest() ) @@ -18270,6 +18432,10 @@ def test_list_backup_schedules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore_admin.ListBackupSchedulesResponse() + post_with_metadata.return_value = ( + firestore_admin.ListBackupSchedulesResponse(), + metadata, + ) client.list_backup_schedules( request, @@ -18281,6 +18447,7 @@ def test_list_backup_schedules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_schedule_rest_bad_request( @@ -18450,10 +18617,14 @@ def test_update_backup_schedule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreAdminRestInterceptor, "post_update_backup_schedule" ) as post, mock.patch.object( + transports.FirestoreAdminRestInterceptor, + "post_update_backup_schedule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirestoreAdminRestInterceptor, "pre_update_backup_schedule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore_admin.UpdateBackupScheduleRequest.pb( firestore_admin.UpdateBackupScheduleRequest() ) @@ -18477,6 +18648,7 @@ def test_update_backup_schedule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schedule.BackupSchedule() + post_with_metadata.return_value = schedule.BackupSchedule(), metadata client.update_backup_schedule( request, @@ -18488,6 +18660,7 @@ def test_update_backup_schedule_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_schedule_rest_bad_request( diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py index e99f5ae4a9..03bdf7342a 100644 --- a/tests/unit/gapic/firestore_v1/test_firestore.py +++ b/tests/unit/gapic/firestore_v1/test_firestore.py @@ -76,6 +76,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -308,6 +316,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FirestoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FirestoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9086,10 +9137,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.GetDocumentRequest.pb(firestore.GetDocumentRequest()) transcode.return_value = { "method": "post", @@ -9111,6 +9165,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.Document() + post_with_metadata.return_value = document.Document(), metadata client.get_document( request, @@ -9122,6 +9177,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_documents_rest_bad_request(request_type=firestore.ListDocumentsRequest): @@ -9208,10 +9264,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_list_documents" ) as post, 
mock.patch.object( + transports.FirestoreRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.ListDocumentsRequest.pb(firestore.ListDocumentsRequest()) transcode.return_value = { "method": "post", @@ -9235,6 +9294,7 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.ListDocumentsResponse() + post_with_metadata.return_value = firestore.ListDocumentsResponse(), metadata client.list_documents( request, @@ -9246,6 +9306,7 @@ def test_list_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request(request_type=firestore.UpdateDocumentRequest): @@ -9407,10 +9468,13 @@ def test_update_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_update_document" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.UpdateDocumentRequest.pb( firestore.UpdateDocumentRequest() ) @@ -9434,6 +9498,7 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gf_document.Document() + post_with_metadata.return_value = gf_document.Document(), metadata client.update_document( request, @@ -9445,6 +9510,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request(request_type=firestore.DeleteDocumentRequest): @@ -9641,10 +9707,13 @@ def test_batch_get_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_batch_get_documents" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_batch_get_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_batch_get_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.BatchGetDocumentsRequest.pb( firestore.BatchGetDocumentsRequest() ) @@ -9670,6 +9739,10 @@ def test_batch_get_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.BatchGetDocumentsResponse() + post_with_metadata.return_value = ( + firestore.BatchGetDocumentsResponse(), + metadata, + ) client.batch_get_documents( request, @@ -9681,6 +9754,7 @@ def test_batch_get_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_begin_transaction_rest_bad_request( @@ -9763,10 +9837,13 @@ def test_begin_transaction_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_begin_transaction" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_begin_transaction_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.FirestoreRestInterceptor, "pre_begin_transaction" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.BeginTransactionRequest.pb( firestore.BeginTransactionRequest() ) @@ -9792,6 +9869,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.BeginTransactionResponse() + post_with_metadata.return_value = firestore.BeginTransactionResponse(), metadata client.begin_transaction( request, @@ -9803,6 +9881,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_commit_rest_bad_request(request_type=firestore.CommitRequest): @@ -9880,10 +9959,13 @@ def test_commit_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_commit" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_commit_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_commit" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.CommitRequest.pb(firestore.CommitRequest()) transcode.return_value = { "method": "post", @@ -9905,6 +9987,7 @@ def test_commit_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.CommitResponse() + post_with_metadata.return_value = firestore.CommitResponse(), metadata client.commit( request, @@ -9916,6 +9999,7 @@ def test_commit_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_rest_bad_request(request_type=firestore.RollbackRequest): @@ -10106,10 +10190,13 @@ def test_run_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_run_query" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_run_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_run_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.RunQueryRequest.pb(firestore.RunQueryRequest()) transcode.return_value = { "method": "post", @@ -10131,6 +10218,7 @@ def test_run_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.RunQueryResponse() + post_with_metadata.return_value = firestore.RunQueryResponse(), metadata client.run_query( request, @@ -10142,6 +10230,7 @@ def test_run_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_aggregation_query_rest_bad_request( @@ -10228,10 +10317,13 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_run_aggregation_query" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_run_aggregation_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_run_aggregation_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.RunAggregationQueryRequest.pb( 
firestore.RunAggregationQueryRequest() ) @@ -10257,6 +10349,10 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.RunAggregationQueryResponse() + post_with_metadata.return_value = ( + firestore.RunAggregationQueryResponse(), + metadata, + ) client.run_aggregation_query( request, @@ -10268,6 +10364,7 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_partition_query_rest_bad_request(request_type=firestore.PartitionQueryRequest): @@ -10348,10 +10445,13 @@ def test_partition_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_partition_query" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_partition_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_partition_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.PartitionQueryRequest.pb( firestore.PartitionQueryRequest() ) @@ -10377,6 +10477,7 @@ def test_partition_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.PartitionQueryResponse() + post_with_metadata.return_value = firestore.PartitionQueryResponse(), metadata client.partition_query( request, @@ -10388,6 +10489,7 @@ def test_partition_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_write_rest_error(): @@ -10496,10 +10598,13 @@ def test_list_collection_ids_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_list_collection_ids" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_list_collection_ids_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_list_collection_ids" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.ListCollectionIdsRequest.pb( firestore.ListCollectionIdsRequest() ) @@ -10525,6 +10630,10 @@ def test_list_collection_ids_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.ListCollectionIdsResponse() + post_with_metadata.return_value = ( + firestore.ListCollectionIdsResponse(), + metadata, + ) client.list_collection_ids( request, @@ -10536,6 +10645,7 @@ def test_list_collection_ids_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_write_rest_bad_request(request_type=firestore.BatchWriteRequest): @@ -10613,10 +10723,13 @@ def test_batch_write_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_batch_write" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_batch_write_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_batch_write" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.BatchWriteRequest.pb(firestore.BatchWriteRequest()) transcode.return_value = { "method": "post", @@ -10640,6 +10753,7 @@ def 
test_batch_write_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firestore.BatchWriteResponse() + post_with_metadata.return_value = firestore.BatchWriteResponse(), metadata client.batch_write( request, @@ -10651,6 +10765,7 @@ def test_batch_write_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_document_rest_bad_request(request_type=firestore.CreateDocumentRequest): @@ -10810,10 +10925,13 @@ def test_create_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirestoreRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirestoreRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = firestore.CreateDocumentRequest.pb( firestore.CreateDocumentRequest() ) @@ -10837,6 +10955,7 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.Document() + post_with_metadata.return_value = document.Document(), metadata client.create_document( request, @@ -10848,6 +10967,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( From 1fcb218e8b7a9384f01fb444c0f9f474156faec4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 26 Feb 2025 12:33:26 -0800 Subject: [PATCH 23/37] chore(main): release 2.20.1 (#1011) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Daniel Sanche --- .release-please-manifest.json | 2 +- CHANGELOG.md | 9 +++++++++ google/cloud/firestore/gapic_version.py | 2 +- google/cloud/firestore_admin_v1/gapic_version.py | 2 +- google/cloud/firestore_bundle/gapic_version.py | 2 +- google/cloud/firestore_v1/gapic_version.py | 2 +- 6 files changed, 14 insertions(+), 5 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index ba3e06a78b..a95c589d8c 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.20.0" + ".": "2.20.1" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 9cc94e98ba..60cd254ed7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,15 @@ [1]: https://pypi.org/project/google-cloud-firestore/#history +## [2.20.1](https://github.com/googleapis/python-firestore/compare/v2.20.0...v2.20.1) (2025-02-26) + + +### Bug Fixes + +* Bump default deadline on CreateDatabase and RestoreDatabase to 2 minutes ([#975](https://github.com/googleapis/python-firestore/issues/975)) ([995fad6](https://github.com/googleapis/python-firestore/commit/995fad68d7e0da84cc67219d8990397d8329421b)) +* Client-side path validation for batch.update ([#1021](https://github.com/googleapis/python-firestore/issues/1021)) ([3b7595b](https://github.com/googleapis/python-firestore/commit/3b7595b9e3ba4eab1c5a68dd6be3c330247ee18d)) +* Watch thread deadlock on exit ([#1014](https://github.com/googleapis/python-firestore/issues/1014)) ([c47677a](https://github.com/googleapis/python-firestore/commit/c47677a190fcbca88e8c14c81ffb0c40d806f511)) + ## 
[2.20.0](https://github.com/googleapis/python-firestore/compare/v2.19.0...v2.20.0) (2025-01-13) diff --git a/google/cloud/firestore/gapic_version.py b/google/cloud/firestore/gapic_version.py index 551f0d2eba..5585b0b1a0 100644 --- a/google/cloud/firestore/gapic_version.py +++ b/google/cloud/firestore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/google/cloud/firestore_admin_v1/gapic_version.py b/google/cloud/firestore_admin_v1/gapic_version.py index 551f0d2eba..5585b0b1a0 100644 --- a/google/cloud/firestore_admin_v1/gapic_version.py +++ b/google/cloud/firestore_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/google/cloud/firestore_bundle/gapic_version.py b/google/cloud/firestore_bundle/gapic_version.py index 551f0d2eba..5585b0b1a0 100644 --- a/google/cloud/firestore_bundle/gapic_version.py +++ b/google/cloud/firestore_bundle/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/google/cloud/firestore_v1/gapic_version.py b/google/cloud/firestore_v1/gapic_version.py index 551f0d2eba..5585b0b1a0 100644 --- a/google/cloud/firestore_v1/gapic_version.py +++ b/google/cloud/firestore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} From 1a08c4b3bfa451cde3438e92800f3bb3eb838e64 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 2 Mar 2025 12:20:59 -0500 Subject: [PATCH 24/37] chore: Update gapic-generator-python to v1.23.2 (#1024) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.23.2 PiperOrigin-RevId: 732281673 Source-Link: https://github.com/googleapis/googleapis/commit/2f37e0ad56637325b24f8603284ccb6f05796f9a Source-Link: https://github.com/googleapis/googleapis-gen/commit/016b7538ba5a798f2ae423d4ccd7f82b06cdf6d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDE2Yjc1MzhiYTVhNzk4ZjJhZTQyM2Q0Y2NkN2Y4MmIwNmNkZjZkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/firestore_admin/async_client.py | 115 ++++++++++++++---- .../services/firestore_admin/client.py | 115 ++++++++++++++---- .../services/firestore/async_client.py | 30 ++++- .../firestore_v1/services/firestore/client.py | 30 ++++- 4 files changed, 232 insertions(+), 58 deletions(-) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py index 6168934029..a348e4d4be 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -435,7 +435,10 @@ async def sample_create_index(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, index]) + flattened_params = [parent, index] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -557,7 +560,10 @@ async def sample_list_indexes(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -677,7 +683,10 @@ async def sample_get_index(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -776,7 +785,10 @@ async def sample_delete_index(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -884,7 +896,10 @@ async def sample_get_field(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1019,7 +1034,10 @@ async def sample_update_field(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([field]) + flattened_params = [field] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1149,7 +1167,10 @@ async def sample_list_fields(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1288,7 +1309,10 @@ async def sample_export_documents(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1424,7 +1448,10 @@ async def sample_import_documents(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1565,7 +1592,10 @@ async def sample_bulk_delete_documents(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1711,7 +1741,10 @@ async def sample_create_database(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1829,7 +1862,10 @@ async def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1936,7 +1972,10 @@ async def sample_list_databases(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2054,7 +2093,10 @@ async def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2181,7 +2223,10 @@ async def sample_delete_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2302,7 +2347,10 @@ async def sample_get_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2414,7 +2462,10 @@ async def sample_list_backups(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2515,7 +2566,10 @@ async def sample_delete_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2759,7 +2813,10 @@ async def sample_create_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_schedule]) + flattened_params = [parent, backup_schedule] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2875,7 +2932,10 @@ async def sample_get_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2986,7 +3046,10 @@ async def sample_list_backup_schedules(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3105,7 +3168,10 @@ async def sample_update_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup_schedule, update_mask]) + flattened_params = [backup_schedule, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3212,7 +3278,10 @@ async def sample_delete_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index fb91e547ac..51162f9b30 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -987,7 +987,10 @@ def sample_create_index(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, index]) + flattened_params = [parent, index] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1106,7 +1109,10 @@ def sample_list_indexes(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1223,7 +1229,10 @@ def sample_get_index(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1319,7 +1328,10 @@ def sample_delete_index(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1424,7 +1436,10 @@ def sample_get_field(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1556,7 +1571,10 @@ def sample_update_field(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([field]) + flattened_params = [field] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1683,7 +1701,10 @@ def sample_list_fields(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1819,7 +1840,10 @@ def sample_export_documents(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1952,7 +1976,10 @@ def sample_import_documents(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2090,7 +2117,10 @@ def sample_bulk_delete_documents(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2233,7 +2263,10 @@ def sample_create_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2348,7 +2381,10 @@ def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2452,7 +2488,10 @@ def sample_list_databases(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2567,7 +2606,10 @@ def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2691,7 +2733,10 @@ def sample_delete_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2809,7 +2854,10 @@ def sample_get_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2918,7 +2966,10 @@ def sample_list_backups(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3016,7 +3067,10 @@ def sample_delete_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3255,7 +3309,10 @@ def sample_create_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, backup_schedule]) + flattened_params = [parent, backup_schedule] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3368,7 +3425,10 @@ def sample_get_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3476,7 +3536,10 @@ def sample_list_backup_schedules(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3592,7 +3655,10 @@ def sample_update_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup_schedule, update_mask]) + flattened_params = [backup_schedule, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3696,7 +3762,10 @@ def sample_delete_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py index 231e24532d..5ccbac9c9e 100644 --- a/google/cloud/firestore_v1/services/firestore/async_client.py +++ b/google/cloud/firestore_v1/services/firestore/async_client.py @@ -578,7 +578,10 @@ async def sample_update_document(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, update_mask]) + flattened_params = [document, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -682,7 +685,10 @@ async def sample_delete_document(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -878,7 +884,10 @@ async def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -996,7 +1005,10 @@ async def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, writes]) + flattened_params = [database, writes] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1104,7 +1116,10 @@ async def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, transaction]) + flattened_params = [database, transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1705,7 +1720,10 @@ async def sample_list_collection_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py index bcb759c182..1831fa3478 100644 --- a/google/cloud/firestore_v1/services/firestore/client.py +++ b/google/cloud/firestore_v1/services/firestore/client.py @@ -976,7 +976,10 @@ def sample_update_document(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, update_mask]) + flattened_params = [document, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1077,7 +1080,10 @@ def sample_delete_document(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1268,7 +1274,10 @@ def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1383,7 +1392,10 @@ def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, writes]) + flattened_params = [database, writes] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1490,7 +1502,10 @@ def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, transaction]) + flattened_params = [database, transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2084,7 +2099,10 @@ def sample_list_collection_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
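The behavior the "Quick check" enforces is itself unchanged by these hunks: a `request` object and flattened keyword arguments remain mutually exclusive. A hedged usage sketch against the non-admin client (assumes default credentials are available; the project name is illustrative):

    # Hedged sketch: `request` and flattened params are mutually exclusive.
    from google.cloud.firestore_v1.services.firestore.client import FirestoreClient
    from google.cloud.firestore_v1.types import firestore

    client = FirestoreClient()
    request = firestore.ListCollectionIdsRequest(
        parent="projects/my-project/databases/(default)/documents"
    )
    # Supplying both the request object and the flattened `parent` argument
    # trips the guard built in these hunks and raises:
    #   ValueError: If the `request` argument is set, then none of ...
    client.list_collection_ids(request=request, parent=request.parent)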
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " From f71aba86742d66da458d1ce1189bab902c9c5880 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 11:28:38 -0400 Subject: [PATCH 25/37] chore: remove unused files (#1027) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: remove unused files * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/build.sh | 20 +- .kokoro/docker/docs/Dockerfile | 89 -------- .kokoro/docker/docs/fetch_gpg_keys.sh | 45 ---- .kokoro/docker/docs/requirements.in | 2 - .kokoro/docker/docs/requirements.txt | 297 -------------------------- .kokoro/docs/common.cfg | 66 ------ .kokoro/docs/docs-presubmit.cfg | 28 --- .kokoro/docs/docs.cfg | 1 - .kokoro/publish-docs.sh | 58 ----- 10 files changed, 16 insertions(+), 594 deletions(-) delete mode 100644 .kokoro/docker/docs/Dockerfile delete mode 100755 .kokoro/docker/docs/fetch_gpg_keys.sh delete mode 100644 .kokoro/docker/docs/requirements.in delete mode 100644 .kokoro/docker/docs/requirements.txt delete mode 100644 .kokoro/docs/common.cfg delete mode 100644 .kokoro/docs/docs-presubmit.cfg delete mode 100644 .kokoro/docs/docs.cfg delete mode 100755 .kokoro/publish-docs.sh diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 426c977fbd..c631e1f7d7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fdc038572b896f739f95cc90e62f16c06e4f2ef0ef3bea343a358331862ad0f0 -# created: 2025-02-13T21:06:55.521673457Z + digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 +# created: 2025-03-05 diff --git a/.kokoro/build.sh b/.kokoro/build.sh index cfd7fc4bcb..d84680bd8d 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-firestore" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -31,10 +33,16 @@ env | grep KOKORO export FIRESTORE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/firebase-credentials.json # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -49,7 +57,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e296b..0000000000 --- a/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. -RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh deleted file mode 100755 index d653dd868e..0000000000 --- a/.kokoro/docker/docs/fetch_gpg_keys.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A script to fetch gpg keys with retry. -# Avoid jinja parsing the file. 
-# - -function retry { - if [[ "${#}" -le 1 ]]; then - echo "Usage: ${0} retry_count commands.." - exit 1 - fi - local retries=${1} - local command="${@:2}" - until [[ "${retries}" -le 0 ]]; do - $command && return 0 - if [[ $? -ne 0 ]]; then - echo "command failed, retrying" - ((retries--)) - fi - done - return 1 -} - -# 3.6.9, 3.7.5 (Ned Deily) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D - -# 3.8.0 (Łukasz Langa) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - E3FF2839C048B25C084DEBE9B26995E310250568 - -# diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 586bd07037..0000000000 --- a/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1,2 +0,0 @@ -nox -gcp-docuploader diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index a9360a25b7..0000000000 --- a/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,297 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.3 \ - --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ - --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 - # via nox -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db - # via requests -charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - 
--hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - 
--hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 - # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a - # via gcp-docuploader -colorlog==6.9.0 \ - --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ - --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via - # gcp-docuploader - # nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in 
-google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.66.0 \ - 
--hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 - # via google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 - # via - # gcp-docuploader - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # google-api-core - # google-cloud-storage -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via gcp-docuploader -tomli==2.2.1 \ - --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - 
--hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 - # via nox -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d - # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via nox diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg deleted file mode 100644 index 075cc0ebb8..0000000000 --- a/.kokoro/docs/common.cfg +++ /dev/null @@ -1,66 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-firestore/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-firestore/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 2e8a0735a6..0000000000 --- a/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-firestore/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d92..0000000000 --- a/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh deleted file mode 100755 index 4ed4aaf134..0000000000 --- a/.kokoro/publish-docs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# build docs -nox -s docs - -# create metadata -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" From 17668b789f447f057962864a93e1b7b7742ef1e3 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 17 Mar 2025 11:09:41 -0400 Subject: [PATCH 26/37] fix: allow Protobuf 6.x (#1028) --- setup.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/setup.py b/setup.py index 635d95eb48..2a47080a15 100644 --- a/setup.py +++ b/setup.py @@ -30,15 +30,15 @@ version = version["__version__"] release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "google-cloud-core >= 1.4.1, <3.0.0dev", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-cloud-core >= 1.4.1, <3.0.0", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", + "protobuf>=3.20.2,<7.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {} From 79a4a88626786ce85682ab6c8d748633e20aeab4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 10:52:23 -0400 Subject: [PATCH 27/37] chore: Update gapic-generator-python to 1.23.6 (#1032) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.23.6 PiperOrigin-RevId: 738170370 Source-Link: https://github.com/googleapis/googleapis/commit/3f1e17aa2dec3f146a9a2a8a64c5c6d19d0b6e15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9afd8c33d4cae610b75fa4999264ea8c8c66b9d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWFmZDhjMzNkNGNhZTYxMGI3NWZhNDk5OTI2NGVhOGM4YzY2YjlkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/firestore_admin_v1/services/__init__.py | 2 +- .../firestore_admin_v1/services/firestore_admin/__init__.py | 2 +- .../firestore_admin_v1/services/firestore_admin/async_client.py | 2 +- .../cloud/firestore_admin_v1/services/firestore_admin/client.py | 2 +- .../cloud/firestore_admin_v1/services/firestore_admin/pagers.py | 2 +- .../services/firestore_admin/transports/__init__.py | 2 +- .../services/firestore_admin/transports/base.py | 2 +- .../services/firestore_admin/transports/grpc.py | 2 +- .../services/firestore_admin/transports/grpc_asyncio.py | 2 +- .../services/firestore_admin/transports/rest.py | 2 +- .../services/firestore_admin/transports/rest_base.py | 2 +- google/cloud/firestore_admin_v1/types/__init__.py | 2 +- google/cloud/firestore_admin_v1/types/backup.py | 2 +- google/cloud/firestore_admin_v1/types/database.py | 2 +- google/cloud/firestore_admin_v1/types/field.py | 2 +- google/cloud/firestore_admin_v1/types/firestore_admin.py | 2 +- google/cloud/firestore_admin_v1/types/index.py | 2 +- google/cloud/firestore_admin_v1/types/location.py | 2 +- google/cloud/firestore_admin_v1/types/operation.py | 2 +- google/cloud/firestore_admin_v1/types/schedule.py | 2 +- google/cloud/firestore_bundle/__init__.py | 2 +- google/cloud/firestore_bundle/services/__init__.py | 2 +- google/cloud/firestore_bundle/types/__init__.py | 2 +- google/cloud/firestore_bundle/types/bundle.py | 2 +- google/cloud/firestore_v1/services/__init__.py | 2 +- google/cloud/firestore_v1/services/firestore/__init__.py | 2 +- google/cloud/firestore_v1/services/firestore/async_client.py | 2 +- google/cloud/firestore_v1/services/firestore/client.py | 2 +- google/cloud/firestore_v1/services/firestore/pagers.py | 2 +- .../firestore_v1/services/firestore/transports/__init__.py | 2 +- google/cloud/firestore_v1/services/firestore/transports/base.py | 2 +- google/cloud/firestore_v1/services/firestore/transports/grpc.py | 2 +- .../firestore_v1/services/firestore/transports/grpc_asyncio.py | 2 +- google/cloud/firestore_v1/services/firestore/transports/rest.py | 2 +- .../firestore_v1/services/firestore/transports/rest_base.py | 2 +- google/cloud/firestore_v1/types/__init__.py | 2 +- google/cloud/firestore_v1/types/aggregation_result.py | 2 +- google/cloud/firestore_v1/types/bloom_filter.py | 2 +- google/cloud/firestore_v1/types/common.py | 2 +- google/cloud/firestore_v1/types/document.py | 2 +- google/cloud/firestore_v1/types/firestore.py | 2 +- google/cloud/firestore_v1/types/query.py | 2 +- google/cloud/firestore_v1/types/query_profile.py | 2 +- google/cloud/firestore_v1/types/write.py | 2 +- scripts/fixup_firestore_admin_v1_keywords.py | 2 +- scripts/fixup_firestore_v1_keywords.py | 2 +- tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/gapic/__init__.py | 2 +- tests/unit/gapic/bundle/__init__.py | 2 +- tests/unit/gapic/firestore_admin_v1/__init__.py | 2 +- 
tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py | 2 +- tests/unit/gapic/firestore_v1/__init__.py | 2 +- tests/unit/gapic/firestore_v1/test_firestore.py | 2 +- 54 files changed, 54 insertions(+), 54 deletions(-) diff --git a/google/cloud/firestore_admin_v1/services/__init__.py b/google/cloud/firestore_admin_v1/services/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/google/cloud/firestore_admin_v1/services/__init__.py +++ b/google/cloud/firestore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py index d2b44fdc19..41b9d63a9f 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py index a348e4d4be..bd903a17d4 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index 51162f9b30..9c80fabfc7 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py index 3520d07727..ee8737c19e 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py index 1bb83fe3f5..36eaee23d7 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py index 2014cc0cb9..a209ae1697 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py index 9fbddcef35..4327bd1ff2 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py index 5c0827e21b..a06b00befb 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py index cc62029d0e..94ef123c58 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py index 66b429c065..e1309157e0 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/__init__.py b/google/cloud/firestore_admin_v1/types/__init__.py index c1ae35fbf0..0d8d69fa9a 100644 --- a/google/cloud/firestore_admin_v1/types/__init__.py +++ b/google/cloud/firestore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/backup.py b/google/cloud/firestore_admin_v1/types/backup.py index f60a92a811..02c594a223 100644 --- a/google/cloud/firestore_admin_v1/types/backup.py +++ b/google/cloud/firestore_admin_v1/types/backup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/database.py b/google/cloud/firestore_admin_v1/types/database.py index 32901f729f..778aa84e4f 100644 --- a/google/cloud/firestore_admin_v1/types/database.py +++ b/google/cloud/firestore_admin_v1/types/database.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/field.py b/google/cloud/firestore_admin_v1/types/field.py index f878b63313..824a9c87f5 100644 --- a/google/cloud/firestore_admin_v1/types/field.py +++ b/google/cloud/firestore_admin_v1/types/field.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/firestore_admin.py b/google/cloud/firestore_admin_v1/types/firestore_admin.py index 28a94bc5aa..ca3c4f9729 100644 --- a/google/cloud/firestore_admin_v1/types/firestore_admin.py +++ b/google/cloud/firestore_admin_v1/types/firestore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/firestore_admin_v1/types/index.py b/google/cloud/firestore_admin_v1/types/index.py index 716213fd22..c317ac38d6 100644 --- a/google/cloud/firestore_admin_v1/types/index.py +++ b/google/cloud/firestore_admin_v1/types/index.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/location.py b/google/cloud/firestore_admin_v1/types/location.py index 657c037703..94ec176395 100644 --- a/google/cloud/firestore_admin_v1/types/location.py +++ b/google/cloud/firestore_admin_v1/types/location.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/operation.py b/google/cloud/firestore_admin_v1/types/operation.py index c3e59d10bf..c58f242733 100644 --- a/google/cloud/firestore_admin_v1/types/operation.py +++ b/google/cloud/firestore_admin_v1/types/operation.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_admin_v1/types/schedule.py b/google/cloud/firestore_admin_v1/types/schedule.py index eb7b138999..a767edfe1f 100644 --- a/google/cloud/firestore_admin_v1/types/schedule.py +++ b/google/cloud/firestore_admin_v1/types/schedule.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_bundle/__init__.py b/google/cloud/firestore_bundle/__init__.py index 79e36edd76..1b6469437b 100644 --- a/google/cloud/firestore_bundle/__init__.py +++ b/google/cloud/firestore_bundle/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_bundle/services/__init__.py b/google/cloud/firestore_bundle/services/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/google/cloud/firestore_bundle/services/__init__.py +++ b/google/cloud/firestore_bundle/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_bundle/types/__init__.py b/google/cloud/firestore_bundle/types/__init__.py index 0ebbc0204b..2cc8d9fb94 100644 --- a/google/cloud/firestore_bundle/types/__init__.py +++ b/google/cloud/firestore_bundle/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/firestore_bundle/types/bundle.py b/google/cloud/firestore_bundle/types/bundle.py index 4b5e01e4e1..3671833d9b 100644 --- a/google/cloud/firestore_bundle/types/bundle.py +++ b/google/cloud/firestore_bundle/types/bundle.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/__init__.py b/google/cloud/firestore_v1/services/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/google/cloud/firestore_v1/services/__init__.py +++ b/google/cloud/firestore_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/__init__.py b/google/cloud/firestore_v1/services/firestore/__init__.py index a33859857e..a69a11b29e 100644 --- a/google/cloud/firestore_v1/services/firestore/__init__.py +++ b/google/cloud/firestore_v1/services/firestore/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py index 5ccbac9c9e..56cf7d3af3 100644 --- a/google/cloud/firestore_v1/services/firestore/async_client.py +++ b/google/cloud/firestore_v1/services/firestore/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py index 1831fa3478..1fb800e616 100644 --- a/google/cloud/firestore_v1/services/firestore/client.py +++ b/google/cloud/firestore_v1/services/firestore/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/pagers.py b/google/cloud/firestore_v1/services/firestore/pagers.py index 4e158080da..be9e4b7142 100644 --- a/google/cloud/firestore_v1/services/firestore/pagers.py +++ b/google/cloud/firestore_v1/services/firestore/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/google/cloud/firestore_v1/services/firestore/transports/__init__.py index f32c361e09..f3ca95f79c 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/__init__.py +++ b/google/cloud/firestore_v1/services/firestore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py index f86482ce3d..862b098d1b 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py index 02f2ab682c..35f4bf75fa 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py index 3ce6c9b317..f461622962 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest.py b/google/cloud/firestore_v1/services/firestore/transports/rest.py index 250a766904..3794ecea38 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py index 0b55ef7f59..1d95cd16ea 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py index 433c8a012b..ae1004e132 100644 --- a/google/cloud/firestore_v1/types/__init__.py +++ b/google/cloud/firestore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/types/aggregation_result.py b/google/cloud/firestore_v1/types/aggregation_result.py index 1fbe2988d0..3c649dc8a2 100644 --- a/google/cloud/firestore_v1/types/aggregation_result.py +++ b/google/cloud/firestore_v1/types/aggregation_result.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/types/bloom_filter.py b/google/cloud/firestore_v1/types/bloom_filter.py index 3c92b21733..f38386cbe1 100644 --- a/google/cloud/firestore_v1/types/bloom_filter.py +++ b/google/cloud/firestore_v1/types/bloom_filter.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/types/common.py b/google/cloud/firestore_v1/types/common.py index cecb1b6100..01fb3d2633 100644 --- a/google/cloud/firestore_v1/types/common.py +++ b/google/cloud/firestore_v1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py index 4def67f9a2..0942354f50 100644 --- a/google/cloud/firestore_v1/types/document.py +++ b/google/cloud/firestore_v1/types/document.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py index 22388676f9..53a6c6e7af 100644 --- a/google/cloud/firestore_v1/types/firestore.py +++ b/google/cloud/firestore_v1/types/firestore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py index 8b21fb6420..9aa8977ddb 100644 --- a/google/cloud/firestore_v1/types/query.py +++ b/google/cloud/firestore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/firestore_v1/types/query_profile.py b/google/cloud/firestore_v1/types/query_profile.py index 0b26236cf0..f93184ae39 100644 --- a/google/cloud/firestore_v1/types/query_profile.py +++ b/google/cloud/firestore_v1/types/query_profile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/firestore_v1/types/write.py b/google/cloud/firestore_v1/types/write.py index 8b12cced20..e393b91480 100644 --- a/google/cloud/firestore_v1/types/write.py +++ b/google/cloud/firestore_v1/types/write.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/fixup_firestore_admin_v1_keywords.py b/scripts/fixup_firestore_admin_v1_keywords.py index 1c24796702..dc7a89f764 100644 --- a/scripts/fixup_firestore_admin_v1_keywords.py +++ b/scripts/fixup_firestore_admin_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/fixup_firestore_v1_keywords.py b/scripts/fixup_firestore_v1_keywords.py index 5798fe0ab6..6481e76bb7 100644 --- a/scripts/fixup_firestore_v1_keywords.py +++ b/scripts/fixup_firestore_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/__init__.py b/tests/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/bundle/__init__.py b/tests/unit/gapic/bundle/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/bundle/__init__.py +++ b/tests/unit/gapic/bundle/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/firestore_admin_v1/__init__.py b/tests/unit/gapic/firestore_admin_v1/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/firestore_admin_v1/__init__.py +++ b/tests/unit/gapic/firestore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py index 85a1119297..0910287769 100644 --- a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/firestore_v1/__init__.py b/tests/unit/gapic/firestore_v1/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/firestore_v1/__init__.py +++ b/tests/unit/gapic/firestore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py index 03bdf7342a..eac609cab4 100644 --- a/tests/unit/gapic/firestore_v1/test_firestore.py +++ b/tests/unit/gapic/firestore_v1/test_firestore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 4061cf7317b1bafb9f667c261f721106ce20488d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 21 Mar 2025 20:32:26 -0400 Subject: [PATCH 28/37] fix: remove setup.cfg configuration for creating universal wheels (#1030) Co-authored-by: Daniel Sanche --- setup.cfg | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index dca8eee85b..d28ceb6d8e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -14,9 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Generated by synthtool. DO NOT EDIT! 
-[bdist_wheel] -universal = 1 [pytype] python_version = 3.8 inputs = From 5957febb82ff6d2b98789600bce078b6edb469f9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 7 Apr 2025 11:43:48 -0700 Subject: [PATCH 29/37] chore(docs): add BulkWriter to docs (#1033) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(docs): add BulkWriter to docs * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * removed unintentional files --------- Co-authored-by: Owl Bot --- docs/firestore_v1/bulk_writer.rst | 6 ++++++ docs/index.rst | 1 + google/cloud/firestore_v1/bulk_writer.py | 2 ++ 3 files changed, 9 insertions(+) create mode 100644 docs/firestore_v1/bulk_writer.rst diff --git a/docs/firestore_v1/bulk_writer.rst b/docs/firestore_v1/bulk_writer.rst new file mode 100644 index 0000000000..e20e4c5325 --- /dev/null +++ b/docs/firestore_v1/bulk_writer.rst @@ -0,0 +1,6 @@ +Bulk Writer +~~~~~~~~~~~ + +.. autoclass:: google.cloud.firestore_v1.bulk_writer.BulkWriter + :members: + :show-inheritance: diff --git a/docs/index.rst b/docs/index.rst index 2b6b999ea7..58517e0f11 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,6 +14,7 @@ API Reference firestore_v1/aggregation firestore_v1/batch + firestore_v1/bulk_writer firestore_v1/client firestore_v1/collection firestore_v1/document diff --git a/google/cloud/firestore_v1/bulk_writer.py b/google/cloud/firestore_v1/bulk_writer.py index ec0fa4881f..eff936300d 100644 --- a/google/cloud/firestore_v1/bulk_writer.py +++ b/google/cloud/firestore_v1/bulk_writer.py @@ -228,6 +228,8 @@ class BulkWriter(AsyncBulkWriterMixin): Usage: + .. code-block:: python + # Instantiate the BulkWriter. This works from either `Client` or # `AsyncClient`. db = firestore.Client() From e5e416ae88e01b32128973c34cd5391a12054b24 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 12:10:27 -0400 Subject: [PATCH 30/37] chore(python): fix incorrect import statement in README (#1034) Source-Link: https://github.com/googleapis/synthtool/commit/87677404f85cee860588ebe2c352d0609f683d5d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- README.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c631e1f7d7..c4e82889dc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 -# created: 2025-03-05 + digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c +# created: 2025-03-31T16:51:40.130756953Z diff --git a/README.rst b/README.rst index 0171769aa9..e349bf7831 100644 --- a/README.rst +++ b/README.rst @@ -162,7 +162,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google") base_logger.addHandler(logging.StreamHandler()) @@ -174,7 +174,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google.cloud.library_v1") base_logger.addHandler(logging.StreamHandler()) From 79cfc5d4e83bf4b18ae03acb4092fe246cd72567 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 13:47:40 -0400 Subject: [PATCH 31/37] chore(python): remove .gitignore from templates (#1036) * chore(python): remove .gitignore from templates Source-Link: https://github.com/googleapis/synthtool/commit/419d94cdddd0d859ac6743ffebd177693c8a027f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a7aef70df5f13313ddc027409fc8f3151422ec2a57ac8730fce8fa75c060d5bb * Remove replacement in owlbot.py --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- owlbot.py | 25 ------------------------- 2 files changed, 2 insertions(+), 27 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c4e82889dc..51b21a62b7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c -# created: 2025-03-31T16:51:40.130756953Z + digest: sha256:a7aef70df5f13313ddc027409fc8f3151422ec2a57ac8730fce8fa75c060d5bb +# created: 2025-04-10T17:00:10.042601326Z diff --git a/owlbot.py b/owlbot.py index 45e3f7a70d..e7a80b28ff 100644 --- a/owlbot.py +++ b/owlbot.py @@ -239,31 +239,6 @@ def system_emulated(session): system_test""", ) -# Add pytype support -s.replace( - ".gitignore", - """\ -.pytest_cache -""", - """\ -.pytest_cache -.pytype -""", -) - -s.replace( - ".gitignore", - """\ -pylintrc -pylintrc.test -""", - """\ -pylintrc -pylintrc.test -.make/** -""", -) - s.replace( "noxfile.py", """\ From 32e729d2b0e168b98f75fc2f60d2fa1db0a77ddc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 12:00:28 -0700 Subject: [PATCH 32/37] chore(python): remove CONTRIBUTING.rst from templates (#1038) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): remove CONTRIBUTING.rst from templates Source-Link: https://github.com/googleapis/synthtool/commit/c96fb118e03c2b50d50fe17c1d0845479a0cfa9a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove replacement in owlbot.py --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- owlbot.py | 19 ------------------- 2 files changed, 2 insertions(+), 21 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 51b21a62b7..8bc6405eca 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a7aef70df5f13313ddc027409fc8f3151422ec2a57ac8730fce8fa75c060d5bb -# created: 2025-04-10T17:00:10.042601326Z + digest: sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab +# created: 2025-04-10T17:48:54.829145676Z diff --git a/owlbot.py b/owlbot.py index e7a80b28ff..22ece4829f 100644 --- a/owlbot.py +++ b/owlbot.py @@ -290,23 +290,4 @@ def lint_setup_py(session): # Setup service account credentials.""", ) - -# Add a section on updating conformance tests to contributing. -s.replace( - "CONTRIBUTING.rst", - "\nTest Coverage", - """************* -Updating Conformance Tests -************************** - -The firestore client libraries use a shared set of conformance tests, the source of which can be found at https://github.com/googleapis/conformance-tests. 
- -To update the copy of these conformance tests used by this repository, run the provided Makefile: - - $ make -f Makefile_v1 - -************* -Test Coverage""" -) - s.replace("noxfile.py", "\"pytest-asyncio\"", "\"pytest-asyncio==0.21.2\"") From 4c69636c5e450628d731e073df40d9654d224204 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 12:11:23 -0400 Subject: [PATCH 33/37] chore(python): remove noxfile.py from templates (#1041) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): remove noxfile.py from templates Source-Link: https://github.com/googleapis/synthtool/commit/776580213a73a04a3ff4fe2ed7f35c7f3d63a882 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:25de45b58e52021d3a24a6273964371a97a4efeefe6ad3845a64e697c63b6447 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove replacements in owlbot.py --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 +- owlbot.py | 135 +------------------------------------- 2 files changed, 5 insertions(+), 134 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 8bc6405eca..508ba98efe 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab -# created: 2025-04-10T17:48:54.829145676Z + digest: sha256:25de45b58e52021d3a24a6273964371a97a4efeefe6ad3845a64e697c63b6447 +# created: 2025-04-14T14:34:43.260858345Z diff --git a/owlbot.py b/owlbot.py index 22ece4829f..a0b6cc8124 100644 --- a/owlbot.py +++ b/owlbot.py @@ -83,13 +83,13 @@ def update_fixup_scripts(library): ) for library in get_staging_dirs(default_version=firestore_default_version, sub_directory="firestore"): - s.move(library / f"google/cloud/firestore_{library.name}", excludes=[f"__init__.py", "**/gapic_version.py"]) + s.move(library / f"google/cloud/firestore_{library.name}", excludes=[f"__init__.py", "**/gapic_version.py", "noxfile.py"]) s.move(library / f"tests/", f"tests") update_fixup_scripts(library) s.move(library / "scripts") for library in get_staging_dirs(default_version=firestore_admin_default_version, sub_directory="firestore_admin"): - s.move(library / f"google/cloud/firestore_admin_{library.name}", excludes=[f"__init__.py", "**/gapic_version.py"]) + s.move(library / f"google/cloud/firestore_admin_{library.name}", excludes=[f"__init__.py", "**/gapic_version.py", "noxfile.py"]) s.move(library / f"tests", f"tests") update_fixup_scripts(library) s.move(library / "scripts") @@ -127,7 +127,7 @@ def update_fixup_scripts(library): s.move( library / f"google/cloud/bundle", f"google/cloud/firestore_bundle", - excludes=["**/gapic_version.py"], + excludes=["**/gapic_version.py", "noxfile.py"], ) s.move(library / f"tests", f"tests") @@ -151,133 +151,6 @@ def update_fixup_scripts(library): python.py_samples(skip_readmes=True) -# ---------------------------------------------------------------------------- -# Customize noxfile.py -# ---------------------------------------------------------------------------- - -def place_before(path, text, *before_text, escape=None): - replacement = "\n".join(before_text) + "\n" + text - if escape: - for c in escape: - 
text = text.replace(c, '\\' + c) - s.replace([path], text, replacement) - -system_emulated_session = """ -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system_emulated(session): - import subprocess - import signal - - try: - # https://github.com/googleapis/python-firestore/issues/472 - # Kokoro image doesn't have java installed, don't attempt to run emulator. - subprocess.call(["java", "--version"]) - except OSError: - session.skip("java not found but required for emulator support") - - try: - subprocess.call(["gcloud", "--version"]) - except OSError: - session.skip("gcloud not found but required for emulator support") - - # Currently, CI/CD doesn't have beta component of gcloud. - subprocess.call( - ["gcloud", "components", "install", "beta", "cloud-firestore-emulator",] - ) - - hostport = "localhost:8789" - session.env["FIRESTORE_EMULATOR_HOST"] = hostport - - p = subprocess.Popen( - [ - "gcloud", - "--quiet", - "beta", - "emulators", - "firestore", - "start", - "--host-port", - hostport, - ] - ) - - try: - system(session) - finally: - # Stop Emulator - os.killpg(os.getpgid(p.pid), signal.SIGKILL) - -""" - -place_before( - "noxfile.py", - "@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)\n" - "def system(session):", - system_emulated_session, - escape="()" -) - -# add system_emulated + mypy nox session -s.replace("noxfile.py", - """nox.options.sessions = \[ - "unit", - "system",""", - """nox.options.sessions = [ - "unit", - "system_emulated", - "system", - "mypy",""", -) - -s.replace( - "noxfile.py", - """\"--quiet\", - f\"--junitxml=system_\{session.python\}_sponge_log.xml\", - system_test""", - """\"--verbose\", - f\"--junitxml=system_{session.python}_sponge_log.xml\", - system_test""", -) - -s.replace( - "noxfile.py", - """\ -BLACK_VERSION = "black\[jupyter\]==23.7.0" -""", - """\ -PYTYPE_VERSION = "pytype==2020.7.24" -BLACK_VERSION = "black[jupyter]==23.7.0" -""", -) - -s.replace( - "noxfile.py", - """\ -@nox.session\(python=DEFAULT_PYTHON_VERSION\) -def lint_setup_py\(session\): -""", - '''\ -@nox.session(python="3.7") -def pytype(session): - """Verify type hints are pytype compatible.""" - session.install(PYTYPE_VERSION) - session.run("pytype",) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy(session): - """Verify type hints are mypy compatible.""" - session.install("-e", ".") - session.install("mypy", "types-setuptools") - # TODO: also verify types on tests, all of google package - session.run("mypy", "-p", "google.cloud.firestore", "--no-incremental") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): -''', -) - s.shell.run(["nox", "-s", "blacken"], hide_output=False) s.replace( @@ -289,5 +162,3 @@ def lint_setup_py(session): # Setup service account credentials.""", ) - -s.replace("noxfile.py", "\"pytest-asyncio\"", "\"pytest-asyncio==0.21.2\"") From d51ac35d59a391f4d0bd61dd66dc988e5ba67b6e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 10:23:16 -0400 Subject: [PATCH 34/37] chore(main): release 2.20.2 (#1031) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 8 ++++++++ google/cloud/firestore/gapic_version.py | 2 +- google/cloud/firestore_admin_v1/gapic_version.py | 2 +- google/cloud/firestore_bundle/gapic_version.py | 2 +- google/cloud/firestore_v1/gapic_version.py | 2 +- 6 files changed, 13 insertions(+), 5 deletions(-) diff --git 
a/.release-please-manifest.json b/.release-please-manifest.json index a95c589d8c..eeb4bcda33 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.20.1" + ".": "2.20.2" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 60cd254ed7..b677942c7e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,14 @@ [1]: https://pypi.org/project/google-cloud-firestore/#history +## [2.20.2](https://github.com/googleapis/python-firestore/compare/v2.20.1...v2.20.2) (2025-04-14) + + +### Bug Fixes + +* Allow Protobuf 6.x ([#1028](https://github.com/googleapis/python-firestore/issues/1028)) ([13d5c6d](https://github.com/googleapis/python-firestore/commit/13d5c6d18a3836e3c90b0b63360cb0394fa0375b)) +* Remove setup.cfg configuration for creating universal wheels ([#1030](https://github.com/googleapis/python-firestore/issues/1030)) ([727098b](https://github.com/googleapis/python-firestore/commit/727098b4abf4a616f3ccce4b2476ab930fd5bf5c)) + ## [2.20.1](https://github.com/googleapis/python-firestore/compare/v2.20.0...v2.20.1) (2025-02-26) diff --git a/google/cloud/firestore/gapic_version.py b/google/cloud/firestore/gapic_version.py index 5585b0b1a0..4c1787c538 100644 --- a/google/cloud/firestore/gapic_version.py +++ b/google/cloud/firestore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/google/cloud/firestore_admin_v1/gapic_version.py b/google/cloud/firestore_admin_v1/gapic_version.py index 5585b0b1a0..4c1787c538 100644 --- a/google/cloud/firestore_admin_v1/gapic_version.py +++ b/google/cloud/firestore_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/google/cloud/firestore_bundle/gapic_version.py b/google/cloud/firestore_bundle/gapic_version.py index 5585b0b1a0..4c1787c538 100644 --- a/google/cloud/firestore_bundle/gapic_version.py +++ b/google/cloud/firestore_bundle/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/google/cloud/firestore_v1/gapic_version.py b/google/cloud/firestore_v1/gapic_version.py index 5585b0b1a0..4c1787c538 100644 --- a/google/cloud/firestore_v1/gapic_version.py +++ b/google/cloud/firestore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} From c0606e2dbcae5162b0509fc19bd72afcbc5fb714 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 30 Jan 2025 17:37:14 +0000 Subject: [PATCH 35/37] feat: Added read_time as a parameter to various calls (synchronous/base classes) --- google/cloud/firestore_v1/aggregation.py | 2 + google/cloud/firestore_v1/base_aggregation.py | 1 + google/cloud/firestore_v1/base_client.py | 2 + google/cloud/firestore_v1/base_collection.py | 1 + google/cloud/firestore_v1/base_document.py | 2 + google/cloud/firestore_v1/base_query.py | 2 + google/cloud/firestore_v1/client.py | 4 +- google/cloud/firestore_v1/collection.py | 4 +- google/cloud/firestore_v1/document.py | 4 +- google/cloud/firestore_v1/query.py | 8 +- tests/system/test_system.py | 78 ++++++++++++++---- tests/unit/v1/test_aggregation.py | 19 +++-- tests/unit/v1/test_client.py | 8 +- tests/unit/v1/test_collection.py | 12 ++- tests/unit/v1/test_cross_language.py | 1 + tests/unit/v1/test_document.py | 14 +++- tests/unit/v1/test_query.py | 81 +++---------------- tests/unit/v1/test_transaction.py | 1 + 18 files changed, 119 insertions(+), 125 deletions(-) diff --git a/google/cloud/firestore_v1/aggregation.py b/google/cloud/firestore_v1/aggregation.py index 73fa813c7c..d816c576f3 100644 --- a/google/cloud/firestore_v1/aggregation.py +++ b/google/cloud/firestore_v1/aggregation.py @@ -22,6 +22,8 @@ import datetime +import datetime + from typing import TYPE_CHECKING, Any, Generator, List, Optional, Union from google.api_core import exceptions, gapic_v1 diff --git a/google/cloud/firestore_v1/base_aggregation.py b/google/cloud/firestore_v1/base_aggregation.py index d99c371094..d8cd32cd04 100644 --- a/google/cloud/firestore_v1/base_aggregation.py +++ b/google/cloud/firestore_v1/base_aggregation.py @@ -214,6 +214,7 @@ def _prep_stream( "parent": parent_path, "structured_aggregation_query": self._to_protobuf(), "transaction": _helpers.get_transaction_id(transaction), + "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index c8504f39db..bb1f89ee14 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -27,6 +27,7 @@ import datetime import os +from datetime import datetime from typing import ( Any, AsyncGenerator, @@ -448,6 +449,7 @@ def _prep_get_all( "documents": document_paths, "mask": mask, "transaction": _helpers.get_transaction_id(transaction), + "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py index c3411d3c2e..b857b2b49d 100644 --- a/google/cloud/firestore_v1/base_collection.py +++ b/google/cloud/firestore_v1/base_collection.py @@ -217,6 +217,7 @@ def _prep_list_documents( # include any fields. 
To save on data transfer, we can set a field_path mask # to include no fields "mask": {"field_paths": None}, + "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index 921c3aab7e..882bffd56b 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -28,6 +28,7 @@ Union, Awaitable, ) +from datetime import datetime from google.api_core import retry as retries @@ -308,6 +309,7 @@ def _prep_batch_get( "documents": [self._document_path], "mask": mask, "transaction": _helpers.get_transaction_id(transaction), + "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 671fd09e1b..c8e5216061 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -1060,6 +1060,7 @@ def _prep_stream( "parent": parent_path, "structured_query": self._to_protobuf(), "transaction": _helpers.get_transaction_id(transaction), + "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() @@ -1449,6 +1450,7 @@ def _prep_get_partitions( "parent": parent_path, "structured_query": query._to_protobuf(), "partition_count": partition_count, + "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py index efe3f29bd2..d23caaeaaf 100644 --- a/google/cloud/firestore_v1/client.py +++ b/google/cloud/firestore_v1/client.py @@ -203,8 +203,8 @@ def document(self, *document_path: str) -> DocumentReference: def get_all( self, references: list, - field_paths: Iterable[str] | None = None, - transaction: Transaction | None = None, + field_paths: Iterable[str] = None, + transaction: Transaction = None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py index e37f6ad0fc..e5764cc265 100644 --- a/google/cloud/firestore_v1/collection.py +++ b/google/cloud/firestore_v1/collection.py @@ -163,9 +163,7 @@ def list_documents( collection does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_list_documents( - page_size, retry, timeout, read_time - ) + request, kwargs = self._prep_list_documents(page_size, retry, timeout, read_time) iterator = self._client._firestore_api.list_documents( request=request, diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index c8248b9b89..014294bb4d 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -404,9 +404,7 @@ def get( """ from google.cloud.firestore_v1.base_client import _parse_batch_get - request, kwargs = self._prep_batch_get( - field_paths, transaction, retry, timeout, read_time - ) + request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout, read_time) response_iter = self._client._firestore_api.batch_get_documents( request=request, diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index ad4f89834e..56f30372b5 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -246,9 +246,7 @@ def _chunkify( ): return - def 
_get_stream_iterator( - self, transaction, retry, timeout, explain_options=None, read_time=None - ): + def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): """Helper method for :meth:`stream`.""" request, expected_prefix, kwargs = self._prep_stream( transaction, @@ -626,9 +624,7 @@ def get_partitions( if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp within the past 7 days. For the most accurate results, use UTC timezone. """ - request, kwargs = self._prep_get_partitions( - partition_count, retry, timeout, read_time - ) + request, kwargs = self._prep_get_partitions(partition_count, retry, timeout, read_time) pager = self._client._firestore_api.partition_query( request=request, diff --git a/tests/system/test_system.py b/tests/system/test_system.py index ba72d6ca02..25a5bf8def 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -225,7 +225,9 @@ def test_collections_w_read_time(client, cleanup, database): # Add to clean-up before API request (in case ``create()`` fails). cleanup(first_document.delete) - data = {"status": "new"} + data = { + "status": "new" + } write_result = first_document.create(data) read_time = write_result.update_time num_collections = len(list(client.collections())) @@ -1018,6 +1020,7 @@ def test_document_get(client, cleanup, database): @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_document_get_w_read_time(client, cleanup, database): + now = datetime.datetime.now(tz=datetime.timezone.utc) document_id = "for-get" + UNIQUE_RESOURCE_ID document = client.document("created", document_id) # Add to clean-up before API request (in case ``create()`` fails). @@ -1026,11 +1029,15 @@ def test_document_get_w_read_time(client, cleanup, database): # First make sure it doesn't exist. assert not document.get().exists - initial_data = {"test": "success"} + initial_data = { + "test": "success" + } write_result = document.create(initial_data) read_time = write_result.update_time - new_data = {"test": "changed"} + new_data = { + "test": "changed" + } document.update(new_data) snapshot = document.get(read_time=read_time) @@ -1586,10 +1593,7 @@ def test_query_stream_w_read_time(query_docs, cleanup, database): # Compare query at read_time to query at current time. 
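# For orientation, a minimal sketch of the user-facing pattern this system
# test exercises, assuming this patch series is applied and a reachable
# project; the "orders" collection and its fields are illustrative only.
from google.cloud import firestore
from google.cloud.firestore_v1.base_query import FieldFilter

client = firestore.Client()
doc_ref = client.collection("orders").document("order-123")

# WriteResult.update_time is a server-assigned timestamp, so it is a valid
# value to pin later reads to.
write_result = doc_ref.create({"status": "new"})
read_time = write_result.update_time

doc_ref.update({"status": "shipped"})

# A read pinned to read_time still sees the pre-update document...
assert doc_ref.get(read_time=read_time).to_dict() == {"status": "new"}

# ...and queries accept the same parameter, which is exactly the comparison
# the test performs below.
stale_query = client.collection("orders").where(
    filter=FieldFilter("status", "==", "new")
)
stale_ids = [snapshot.id for snapshot in stale_query.stream(read_time=read_time)]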
query = collection.where(filter=FieldFilter("b", "==", 1)) - values = { - snapshot.id: snapshot.to_dict() - for snapshot in query.stream(read_time=read_time) - } + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream(read_time=read_time)} assert len(values) == num_vals assert new_ref.id not in values for key, value in values.items(): @@ -1964,10 +1968,9 @@ def test_get_all(client, cleanup, database): document1.update(new_data) document2.create(new_data) document3.update(new_data) + - snapshots = list( - client.get_all([document1, document2, document3], read_time=read_time) - ) + snapshots = list(client.get_all([document1, document2, document3], read_time=read_time)) assert snapshots[0].exists assert snapshots[1].exists assert not snapshots[2].exists @@ -3156,14 +3159,11 @@ @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) -@pytest.mark.parametrize( - "aggregation_type,expected_value", [("count", 5), ("sum", 100), ("avg", 4.0)] -) -def test_aggregation_queries_with_read_time( - collection, query, cleanup, database, aggregation_type, expected_value -): +@pytest.mark.parametrize("aggregation_type,expected_value", + [("count", 5), ("sum", 100), ("avg", 4.0)]) +def test_aggregation_queries_with_read_time(collection, query, cleanup, database, aggregation_type, expected_value): """ - Ensure that all aggregation queries work when read_time is passed into + Ensure that all aggregation queries work when read_time is passed into a query.<aggregation>().get() method """ # Find the most recent read_time in collections @@ -3448,6 +3448,50 @@ def in_transaction(transaction): assert inner_fn_ran is True +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_query_in_transaction_with_read_time(client, cleanup, database): + """ + Test queries with read_time in transactions. 
+ """ + collection_id = "doc-create" + UNIQUE_RESOURCE_ID + doc_ids = [f"doc{i}" + UNIQUE_RESOURCE_ID for i in range(5)] + doc_refs = [client.document(collection_id, doc_id) for doc_id in doc_ids] + for doc_ref in doc_refs: + cleanup(doc_ref.delete) + doc_refs[0].create({"a": 1, "b": 2}) + doc_refs[1].create({"a": 1, "b": 1}) + + read_time = max(docref.get().read_time for docref in doc_refs) + doc_refs[2].create({"a": 1, "b": 3}) + + collection = client.collection(collection_id) + query = collection.where(filter=FieldFilter("a", "==", 1)) + + with client.transaction() as transaction: + # should work when transaction is initiated through transactional decorator + @firestore.transactional + def in_transaction(transaction): + global inner_fn_ran + + new_b_values = [docs.get("b") for docs in transaction.get(query, read_time=read_time)] + assert len(new_b_values) == 2 + assert 1 in new_b_values + assert 2 in new_b_values + assert 3 not in new_b_values + + new_b_values = [docs.get("b") for docs in transaction.get(query)] + assert len(new_b_values) == 3 + assert 1 in new_b_values + assert 2 in new_b_values + assert 3 in new_b_values + + inner_fn_ran = True + + in_transaction(transaction) + # make sure we didn't skip assertions in inner function + assert inner_fn_ran is True + + @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_update_w_uuid(client, cleanup, database): """ diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index e13c010c37..a35b91709f 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -329,6 +329,7 @@ def test_aggregation_query_prep_stream(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, + "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -355,6 +356,7 @@ def test_aggregation_query_prep_stream_with_transaction(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, + "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -381,6 +383,7 @@ def test_aggregation_query_prep_stream_with_explain_options(): "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, "explain_options": explain_options._to_dict(), + "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -476,9 +479,7 @@ def _aggregation_query_get_helper( aggregation_query = make_aggregation_query(query) aggregation_query.count(alias="all") - aggregation_result = AggregationResult( - alias="total", value=5, read_time=response_read_time - ) + aggregation_result = AggregationResult(alias="total", value=5, read_time=response_read_time) if explain_options is not None: explain_metrics = {"execution_stats": {"results_returned": 1}} @@ -525,6 +526,7 @@ def _aggregation_query_get_helper( "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, + "read_time": query_read_time, } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() @@ -548,9 +550,7 @@ def test_aggregation_query_get_with_readtime(): query_read_time = datetime.now(tz=timezone.utc) - timedelta(hours=1) response_read_time = _datetime_to_pb_timestamp(query_read_time) - _aggregation_query_get_helper( - response_read_time=response_read_time, query_read_time=query_read_time - ) + 
_aggregation_query_get_helper(response_read_time=response_read_time, query_read_time=query_read_time) def test_aggregation_query_get_retry_timeout(): @@ -610,6 +610,7 @@ def test_aggregation_query_get_transaction(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, + "read_time": None, }, metadata=client._rpc_metadata, **kwargs, @@ -797,7 +798,9 @@ def _aggregation_query_stream_helper( # Execute the query and check the response. returned = aggregation_query.stream( - **kwargs, explain_options=explain_options, read_time=read_time + **kwargs, + explain_options=explain_options, + read_time=read_time ) assert isinstance(returned, StreamGenerator) @@ -825,6 +828,7 @@ def _aggregation_query_stream_helper( "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, + "read_time": read_time, } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() @@ -917,6 +921,7 @@ def test_aggregation_from_query(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, + "read_time": None, }, metadata=client._rpc_metadata, **kwargs, diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py index 51a6b7f449..03300d7d5d 100644 --- a/tests/unit/v1/test_client.py +++ b/tests/unit/v1/test_client.py @@ -356,12 +356,7 @@ def _invoke_get_all(client, references, document_pbs, **kwargs): def _get_all_helper( - num_snapshots=2, - txn_id=None, - retry=None, - timeout=None, - database=None, - read_time=None, + num_snapshots=2, txn_id=None, retry=None, timeout=None, database=None, read_time=None ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_document import DocumentSnapshot @@ -494,6 +489,7 @@ def test_client_get_all_unknown_result(database): "documents": doc_paths, "mask": None, "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index da91651b95..eb669aa4d3 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -300,13 +300,11 @@ def _next_page(self): kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout) if page_size is not None: - documents = list( - collection.list_documents( - page_size=page_size, - **kwargs, - read_time=read_time, - ) - ) + documents = list(collection.list_documents( + page_size=page_size, + **kwargs, + read_time=read_time, + )) else: documents = list(collection.list_documents(**kwargs, read_time=read_time)) diff --git a/tests/unit/v1/test_cross_language.py b/tests/unit/v1/test_cross_language.py index d2adeb2ba6..a5ccb16be9 100644 --- a/tests/unit/v1/test_cross_language.py +++ b/tests/unit/v1/test_cross_language.py @@ -154,6 +154,7 @@ def test_get_testprotos(test_proto): "documents": [doc._document_path], "mask": None, "transaction": None, + "read_time": None, } firestore_api.batch_get_documents.assert_called_once_with( diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index fdb8fdd08b..fe48453e77 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -528,6 +528,10 @@ def test_documentreference_get_with_multiple_field_paths(database): def test_documentreference_get_with_transaction(database): _get_helper(use_transaction=True, database=database) +@pytest.mark.parametrize("database", [None, "somedb"]) +def 
test_documentreference_get_with_read_time(database): + _get_helper(read_time=DatetimeWithNanoseconds.now(), database=database) + @pytest.mark.parametrize("database", [None, "somedb"]) def test_documentreference_get_with_read_time(database): @@ -535,7 +539,11 @@ def test_documentreference_get_with_read_time(database): def _collections_helper( - page_size=None, retry=None, timeout=None, read_time=None, database=None + page_size=None, + retry=None, + timeout=None, + read_time=None, + database=None ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.collection import CollectionReference @@ -557,9 +565,7 @@ def __iter__(self): # Actually make a document and call delete(). document = _make_document_reference("where", "we-are", client=client) if page_size is not None: - collections = list( - document.collections(page_size=page_size, **kwargs, read_time=read_time) - ) + collections = list(document.collections(page_size=page_size, **kwargs, read_time=read_time)) else: collections = list(document.collections(**kwargs, read_time=read_time)) diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index cb2400b503..948e7bf15c 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -96,6 +96,7 @@ def _query_get_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() @@ -178,6 +179,7 @@ def test_query_get_limit_to_last(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -379,9 +381,7 @@ def _query_stream_helper( # Execute the query and check the response. query = make_query(parent) - get_response = query.stream( - **kwargs, explain_options=explain_options, read_time=read_time - ) + get_response = query.stream(**kwargs, explain_options=explain_options, read_time=read_time) assert isinstance(get_response, StreamGenerator) returned = list(get_response) @@ -405,6 +405,7 @@ def _query_stream_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() @@ -499,64 +500,6 @@ def test_query_stream_with_transaction(database): ) -def test_query_stream_w_explain_options(): - from google.cloud.firestore_v1.query_profile import ExplainOptions - - explain_options = ExplainOptions(analyze=True) - _query_stream_helper(explain_options=explain_options) - - -@pytest.mark.parametrize("database", [None, "somedb"]) -def test_query_stream_with_transaction_and_read_time(database): - from google.cloud.firestore_v1.stream_generator import StreamGenerator - - # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=["run_query"]) - - # Attach the fake GAPIC to a real client. - client = make_client(database=database) - client._firestore_api_internal = firestore_api - - # Create a real-ish transaction for this client. - transaction = client.transaction() - txn_id = b"\x00\x00\x01-work-\xf2" - transaction._id = txn_id - - # Create a read_time for this client. - read_time = datetime.datetime.now(tz=datetime.timezone.utc) - - # Make a **real** collection reference as parent. - parent = client.collection("declaration") - - # Add a dummy response to the minimal fake GAPIC. 
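# The unit test being deleted here covered transaction + read_time together;
# the combination itself remains supported (this same commit adds a system
# test for it). A minimal sketch of that calling pattern, assuming the series
# is applied; the "audit" collection and its fields are illustrative.
import datetime

from google.cloud import firestore
from google.cloud.firestore_v1.base_query import FieldFilter

client = firestore.Client()
query = client.collection("audit").where(filter=FieldFilter("a", "==", 1))


@firestore.transactional
def read_as_of(transaction, read_time):
    # Queries issued through the transaction can still be pinned to an
    # earlier consistent snapshot by passing read_time.
    return [doc.to_dict() for doc in transaction.get(query, read_time=read_time)]


# Without Point-in-Time Recovery, any microsecond-precision timestamp from
# the past hour is accepted.
almost_an_hour_ago = datetime.datetime.now(
    tz=datetime.timezone.utc
) - datetime.timedelta(minutes=59)
rows = read_as_of(client.transaction(), almost_an_hour_ago)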
- parent_path, expected_prefix = parent._parent_info() - name = "{}/burger".format(expected_prefix) - data = {"lettuce": b"\xee\x87"} - response_pb = _make_query_response(name=name, data=data) - firestore_api.run_query.return_value = iter([response_pb]) - - # Execute the query and check the response. - query = make_query(parent) - get_response = query.stream(transaction=transaction, read_time=read_time) - assert isinstance(get_response, StreamGenerator) - returned = list(get_response) - assert len(returned) == 1 - snapshot = returned[0] - assert snapshot.reference._path == ("declaration", "burger") - assert snapshot.to_dict() == data - - # Verify the mock call. - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": txn_id, - "read_time": read_time, - }, - metadata=client._rpc_metadata, - ) - - @pytest.mark.parametrize("database", [None, "somedb"]) def test_query_stream_no_results(database): from google.cloud.firestore_v1.stream_generator import StreamGenerator @@ -587,6 +530,7 @@ def test_query_stream_no_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -622,6 +566,7 @@ def test_query_stream_second_response_in_empty_stream(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -666,6 +611,7 @@ def test_query_stream_with_skipped_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -710,6 +656,7 @@ def test_query_stream_empty_after_first_response(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -757,6 +704,7 @@ def test_query_stream_w_collection_group(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, + "read_time": None, }, metadata=client._rpc_metadata, ) @@ -767,12 +715,7 @@ def test_query_stream_w_collection_group(database): def _query_stream_w_retriable_exc_helper( - retry=_not_passed, - timeout=None, - transaction=None, - expect_retry=True, - database=None, - read_time=None, + retry=_not_passed, timeout=None, transaction=None, expect_retry=True, database=None, read_time=None ): from google.api_core import exceptions, gapic_v1 @@ -930,9 +873,7 @@ def test_collection_group_constructor_all_descendents_is_false(): _make_collection_group(mock.sentinel.parent, all_descendants=False) -def _collection_group_get_partitions_helper( - retry=None, timeout=None, database=None, read_time=None -): +def _collection_group_get_partitions_helper(retry=None, timeout=None, database=None, read_time=None): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC. 
diff --git a/tests/unit/v1/test_transaction.py b/tests/unit/v1/test_transaction.py index 519a07e67c..de66aeaf99 100644 --- a/tests/unit/v1/test_transaction.py +++ b/tests/unit/v1/test_transaction.py @@ -472,6 +472,7 @@ def _transaction_get_w_query_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": b"beep-fail-commit", + "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() From 6b1bb428daa6e1d7a95178263b1e20e0b2c43883 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Wed, 30 Apr 2025 13:53:21 +0000 Subject: [PATCH 36/37] Fixed unit tests --- google/cloud/firestore_v1/base_aggregation.py | 1 - google/cloud/firestore_v1/base_client.py | 1 - google/cloud/firestore_v1/base_collection.py | 1 - google/cloud/firestore_v1/base_document.py | 1 - google/cloud/firestore_v1/base_query.py | 2 -- tests/unit/v1/test_aggregation.py | 23 +++++++++++-------- tests/unit/v1/test_client.py | 1 - tests/unit/v1/test_cross_language.py | 1 - tests/unit/v1/test_query.py | 8 ------- tests/unit/v1/test_transaction.py | 6 ++++- 10 files changed, 18 insertions(+), 27 deletions(-) diff --git a/google/cloud/firestore_v1/base_aggregation.py b/google/cloud/firestore_v1/base_aggregation.py index d8cd32cd04..d99c371094 100644 --- a/google/cloud/firestore_v1/base_aggregation.py +++ b/google/cloud/firestore_v1/base_aggregation.py @@ -214,7 +214,6 @@ def _prep_stream( "parent": parent_path, "structured_aggregation_query": self._to_protobuf(), "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index bb1f89ee14..563c4373fc 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -449,7 +449,6 @@ def _prep_get_all( "documents": document_paths, "mask": mask, "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py index b857b2b49d..c3411d3c2e 100644 --- a/google/cloud/firestore_v1/base_collection.py +++ b/google/cloud/firestore_v1/base_collection.py @@ -217,7 +217,6 @@ def _prep_list_documents( # include any fields. 
To save on data transfer, we can set a field_path mask # to include no fields "mask": {"field_paths": None}, - "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index 882bffd56b..24c0299c0d 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -309,7 +309,6 @@ def _prep_batch_get( "documents": [self._document_path], "mask": mask, "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index c8e5216061..671fd09e1b 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -1060,7 +1060,6 @@ def _prep_stream( "parent": parent_path, "structured_query": self._to_protobuf(), "transaction": _helpers.get_transaction_id(transaction), - "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() @@ -1450,7 +1449,6 @@ def _prep_get_partitions( "parent": parent_path, "structured_query": query._to_protobuf(), "partition_count": partition_count, - "read_time": read_time, } if read_time is not None: request["read_time"] = read_time diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index a35b91709f..98817f1d3f 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -329,7 +329,6 @@ def test_aggregation_query_prep_stream(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, - "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -356,7 +355,6 @@ def test_aggregation_query_prep_stream_with_transaction(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, - "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -383,7 +381,6 @@ def test_aggregation_query_prep_stream_with_explain_options(): "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, "explain_options": explain_options._to_dict(), - "read_time": None, } assert request == expected_request assert kwargs == {"retry": None} @@ -526,7 +523,6 @@ def _aggregation_query_get_helper( "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, - "read_time": query_read_time, } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() @@ -610,7 +606,6 @@ def test_aggregation_query_get_transaction(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, - "read_time": None, }, metadata=client._rpc_metadata, **kwargs, @@ -669,13 +664,17 @@ def _stream_w_exception(*_args, **_kw): raise retriable_exc firestore_api.run_aggregation_query.side_effect = [_stream_w_exception(), iter([])] - kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) # Execute the query and check the response. 
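# The reverts above restore the convention the unit tests were written
# against: request dicts carry a "read_time" key only when a value was
# actually supplied, rather than always including an explicit None. A
# condensed sketch of that pattern, simplified from the _prep_* helpers
# (not their full signatures):
def build_run_query_request(parent_path, structured_query, transaction_id=None, read_time=None):
    request = {
        "parent": parent_path,
        "structured_query": structured_query,
        "transaction": transaction_id,
    }
    # Attach read_time only when set, so the GAPIC layer never receives an
    # explicit None for the field.
    if read_time is not None:
        request["read_time"] = read_time
    return request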
query = make_query(parent) aggregation_query = make_aggregation_query(query) - get_response = aggregation_query.stream(transaction=transaction, **kwargs) + get_response = aggregation_query.stream( + transaction=transaction, + **kwargs, + read_time=read_time, + ) assert isinstance(get_response, stream_generator.StreamGenerator) if expect_retry: @@ -727,7 +726,8 @@ def _stream_w_exception(*_args, **_kw): "transaction": None, } if read_time is not None: - expected_request["read_time"] = None + expected_request["read_time"] = read_time + assert calls[1] == mock.call( request=expected_request, metadata=client._rpc_metadata, @@ -753,6 +753,11 @@ def test_aggregation_query_stream_w_retriable_exc_w_transaction(): _aggregation_query_stream_w_retriable_exc_helper(transaction=txn) +def test_aggregation_query_stream_w_retriable_exc_w_read_time(): + read_time = datetime.now(tz=timezone.utc) + _aggregation_query_stream_w_retriable_exc_helper(read_time=read_time) + + def _aggregation_query_stream_helper( retry=None, timeout=None, @@ -828,7 +833,6 @@ def _aggregation_query_stream_helper( "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": None, - "read_time": read_time, } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() @@ -921,7 +925,6 @@ def test_aggregation_from_query(): "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), "transaction": txn_id, - "read_time": None, }, metadata=client._rpc_metadata, **kwargs, diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py index 03300d7d5d..4e893184cf 100644 --- a/tests/unit/v1/test_client.py +++ b/tests/unit/v1/test_client.py @@ -489,7 +489,6 @@ def test_client_get_all_unknown_result(database): "documents": doc_paths, "mask": None, "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) diff --git a/tests/unit/v1/test_cross_language.py b/tests/unit/v1/test_cross_language.py index a5ccb16be9..d2adeb2ba6 100644 --- a/tests/unit/v1/test_cross_language.py +++ b/tests/unit/v1/test_cross_language.py @@ -154,7 +154,6 @@ def test_get_testprotos(test_proto): "documents": [doc._document_path], "mask": None, "transaction": None, - "read_time": None, } firestore_api.batch_get_documents.assert_called_once_with( diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index 948e7bf15c..5e2532a5b7 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -96,7 +96,6 @@ def _query_get_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": read_time, } if explain_options: request["explain_options"] = explain_options._to_dict() @@ -179,7 +178,6 @@ def test_query_get_limit_to_last(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -405,7 +403,6 @@ def _query_stream_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() @@ -530,7 +527,6 @@ def test_query_stream_no_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -566,7 +562,6 @@ def test_query_stream_second_response_in_empty_stream(database): "parent": parent_path, "structured_query": 
query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -611,7 +606,6 @@ def test_query_stream_with_skipped_results(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -656,7 +650,6 @@ def test_query_stream_empty_after_first_response(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) @@ -704,7 +697,6 @@ def test_query_stream_w_collection_group(database): "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": None, - "read_time": None, }, metadata=client._rpc_metadata, ) diff --git a/tests/unit/v1/test_transaction.py b/tests/unit/v1/test_transaction.py index de66aeaf99..e31f3953d1 100644 --- a/tests/unit/v1/test_transaction.py +++ b/tests/unit/v1/test_transaction.py @@ -399,6 +399,11 @@ def test_transaction_get_w_document_ref_w_explain_options(): ) +def test_transaction_get_w_document_ref_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _transaction_get_w_document_ref_helper(read_time=read_time) + + def _transaction_get_w_query_helper( retry=None, timeout=None, @@ -472,7 +477,6 @@ def _transaction_get_w_query_helper( "parent": parent_path, "structured_query": query._to_protobuf(), "transaction": b"beep-fail-commit", - "read_time": read_time, } if explain_options is not None: request["explain_options"] = explain_options._to_dict() From e33292c365d50d80d5004d14ffff6f828fe95271 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 30 Apr 2025 16:01:34 +0000 Subject: [PATCH 37/37] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/firestore_v1/collection.py | 4 ++- google/cloud/firestore_v1/document.py | 4 ++- google/cloud/firestore_v1/query.py | 8 ++++-- tests/system/test_system.py | 37 +++++++++++++------------ tests/unit/v1/test_aggregation.py | 14 ++++++---- tests/unit/v1/test_client.py | 7 ++++- tests/unit/v1/test_collection.py | 12 ++++---- tests/unit/v1/test_document.py | 11 ++++---- tests/unit/v1/test_query.py | 15 ++++++++-- 9 files changed, 70 insertions(+), 42 deletions(-) diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py index e5764cc265..e37f6ad0fc 100644 --- a/google/cloud/firestore_v1/collection.py +++ b/google/cloud/firestore_v1/collection.py @@ -163,7 +163,9 @@ def list_documents( collection does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_list_documents(page_size, retry, timeout, read_time) + request, kwargs = self._prep_list_documents( + page_size, retry, timeout, read_time + ) iterator = self._client._firestore_api.list_documents( request=request, diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index 014294bb4d..c8248b9b89 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -404,7 +404,9 @@ def get( """ from google.cloud.firestore_v1.base_client import _parse_batch_get - request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout, read_time) + request, kwargs = self._prep_batch_get( + field_paths, transaction, retry, timeout, read_time + ) 
response_iter = self._client._firestore_api.batch_get_documents( request=request, diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index 56f30372b5..ad4f89834e 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -246,7 +246,9 @@ def _chunkify( ): return - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): + def _get_stream_iterator( + self, transaction, retry, timeout, explain_options=None, read_time=None + ): """Helper method for :meth:`stream`.""" request, expected_prefix, kwargs = self._prep_stream( transaction, @@ -624,7 +626,9 @@ def get_partitions( if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp within the past 7 days. For the most accurate results, use UTC timezone. """ - request, kwargs = self._prep_get_partitions(partition_count, retry, timeout, read_time) + request, kwargs = self._prep_get_partitions( + partition_count, retry, timeout, read_time + ) pager = self._client._firestore_api.partition_query( request=request, diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 25a5bf8def..d826f738a3 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -225,9 +225,7 @@ def test_collections_w_read_time(client, cleanup, database): # Add to clean-up before API request (in case ``create()`` fails). cleanup(first_document.delete) - data = { - "status": "new" - } + data = {"status": "new"} write_result = first_document.create(data) read_time = write_result.update_time num_collections = len(list(client.collections())) @@ -1029,15 +1027,11 @@ def test_document_get_w_read_time(client, cleanup, database): # First make sure it doesn't exist. assert not document.get().exists - initial_data = { - "test": "success" - } + initial_data = {"test": "success"} write_result = document.create(initial_data) read_time = write_result.update_time - new_data = { - "test": "changed" - } + new_data = {"test": "changed"} document.update(new_data) snapshot = document.get(read_time=read_time) @@ -1593,7 +1587,10 @@ def test_query_stream_w_read_time(query_docs, cleanup, database): # Compare query at read_time to query at current time. 
query = collection.where(filter=FieldFilter("b", "==", 1)) - values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream(read_time=read_time)} + values = { + snapshot.id: snapshot.to_dict() + for snapshot in query.stream(read_time=read_time) + } assert len(values) == num_vals assert new_ref.id not in values for key, value in values.items(): @@ -1968,9 +1965,10 @@ def test_get_all(client, cleanup, database): document1.update(new_data) document2.create(new_data) document3.update(new_data) - - snapshots = list(client.get_all([document1, document2, document3], read_time=read_time)) + snapshots = list( + client.get_all([document1, document2, document3], read_time=read_time) + ) assert snapshots[0].exists assert snapshots[1].exists assert not snapshots[2].exists @@ -3159,11 +3157,14 @@ @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) -@pytest.mark.parametrize("aggregation_type,expected_value", - [("count", 5), ("sum", 100), ("avg", 4.0)]) -def test_aggregation_queries_with_read_time(collection, query, cleanup, database, aggregation_type, expected_value): +@pytest.mark.parametrize( + "aggregation_type,expected_value", [("count", 5), ("sum", 100), ("avg", 4.0)] +) +def test_aggregation_queries_with_read_time( + collection, query, cleanup, database, aggregation_type, expected_value +): """ - Ensure that all aggregation queries work when read_time is passed into + Ensure that all aggregation queries work when read_time is passed into a query.<aggregation>().get() method """ # Find the most recent read_time in collections @@ -3473,7 +3474,9 @@ def test_query_in_transaction_with_read_time(client, cleanup, database): def in_transaction(transaction): global inner_fn_ran - new_b_values = [docs.get("b") for docs in transaction.get(query, read_time=read_time)] + new_b_values = [ + docs.get("b") for docs in transaction.get(query, read_time=read_time) + ] assert len(new_b_values) == 2 assert 1 in new_b_values assert 2 in new_b_values diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index 98817f1d3f..d428e842c7 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -476,7 +476,9 @@ def _aggregation_query_get_helper( aggregation_query = make_aggregation_query(query) aggregation_query.count(alias="all") - aggregation_result = AggregationResult(alias="total", value=5, read_time=response_read_time) + aggregation_result = AggregationResult( + alias="total", value=5, read_time=response_read_time + ) if explain_options is not None: explain_metrics = {"execution_stats": {"results_returned": 1}} @@ -546,7 +548,9 @@ def test_aggregation_query_get_with_readtime(): query_read_time = datetime.now(tz=timezone.utc) - timedelta(hours=1) response_read_time = _datetime_to_pb_timestamp(query_read_time) - _aggregation_query_get_helper(response_read_time=response_read_time, query_read_time=query_read_time) + _aggregation_query_get_helper( + response_read_time=response_read_time, query_read_time=query_read_time + ) def test_aggregation_query_get_retry_timeout(): @@ -664,7 +668,7 @@ def _stream_w_exception(*_args, **_kw): raise retriable_exc firestore_api.run_aggregation_query.side_effect = [_stream_w_exception(), iter([])] - kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) # Execute the query and check the response. 
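# For reference, the aggregation usage described by the docstring being
# reformatted above, as a minimal sketch; assumes the series is applied and
# an existing "inventory" collection. count(), sum(), and avg() all accept
# the same read_time keyword on get().
import datetime

from google.cloud import firestore

client = firestore.Client()
collection = client.collection("inventory")

read_time = datetime.datetime.now(
    tz=datetime.timezone.utc
) - datetime.timedelta(minutes=5)

# Each result batch is a list of AggregationResult objects, so the count can
# be pinned to the same snapshot as the document reads around it.
for batch in collection.count(alias="all").get(read_time=read_time):
    for aggregation_result in batch:
        print(aggregation_result.alias, aggregation_result.value)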
query = make_query(parent) @@ -803,9 +807,7 @@ def _aggregation_query_stream_helper( # Execute the query and check the response. returned = aggregation_query.stream( - **kwargs, - explain_options=explain_options, - read_time=read_time + **kwargs, explain_options=explain_options, read_time=read_time ) assert isinstance(returned, StreamGenerator) diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py index 4e893184cf..51a6b7f449 100644 --- a/tests/unit/v1/test_client.py +++ b/tests/unit/v1/test_client.py @@ -356,7 +356,12 @@ def _invoke_get_all(client, references, document_pbs, **kwargs): def _get_all_helper( - num_snapshots=2, txn_id=None, retry=None, timeout=None, database=None, read_time=None + num_snapshots=2, + txn_id=None, + retry=None, + timeout=None, + database=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_document import DocumentSnapshot diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index eb669aa4d3..da91651b95 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -300,11 +300,13 @@ def _next_page(self): kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout) if page_size is not None: - documents = list(collection.list_documents( - page_size=page_size, - **kwargs, - read_time=read_time, - )) + documents = list( + collection.list_documents( + page_size=page_size, + **kwargs, + read_time=read_time, + ) + ) else: documents = list(collection.list_documents(**kwargs, read_time=read_time)) diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index fe48453e77..688e01e2d9 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -528,6 +528,7 @@ def test_documentreference_get_with_multiple_field_paths(database): def test_documentreference_get_with_transaction(database): _get_helper(use_transaction=True, database=database) + @pytest.mark.parametrize("database", [None, "somedb"]) def test_documentreference_get_with_read_time(database): _get_helper(read_time=DatetimeWithNanoseconds.now(), database=database) @@ -539,11 +540,7 @@ def test_documentreference_get_with_read_time(database): def _collections_helper( - page_size=None, - retry=None, - timeout=None, - read_time=None, - database=None + page_size=None, retry=None, timeout=None, read_time=None, database=None ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.collection import CollectionReference @@ -565,7 +562,9 @@ def __iter__(self): # Actually make a document and call delete(). document = _make_document_reference("where", "we-are", client=client) if page_size is not None: - collections = list(document.collections(page_size=page_size, **kwargs, read_time=read_time)) + collections = list( + document.collections(page_size=page_size, **kwargs, read_time=read_time) + ) else: collections = list(document.collections(**kwargs, read_time=read_time)) diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index 5e2532a5b7..25bfe7ad1f 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -379,7 +379,9 @@ def _query_stream_helper( # Execute the query and check the response. 
query = make_query(parent) - get_response = query.stream(**kwargs, explain_options=explain_options, read_time=read_time) + get_response = query.stream( + **kwargs, explain_options=explain_options, read_time=read_time + ) assert isinstance(get_response, StreamGenerator) returned = list(get_response) @@ -707,7 +709,12 @@ def test_query_stream_w_collection_group(database): def _query_stream_w_retriable_exc_helper( - retry=_not_passed, timeout=None, transaction=None, expect_retry=True, database=None, read_time=None + retry=_not_passed, + timeout=None, + transaction=None, + expect_retry=True, + database=None, + read_time=None, ): from google.api_core import exceptions, gapic_v1 @@ -865,7 +872,9 @@ def test_collection_group_constructor_all_descendents_is_false(): _make_collection_group(mock.sentinel.parent, all_descendants=False) -def _collection_group_get_partitions_helper(retry=None, timeout=None, database=None, read_time=None): +def _collection_group_get_partitions_helper( + retry=None, timeout=None, database=None, read_time=None +): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC.
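# The helper above exercises CollectionGroup.get_partitions, which also
# gained read_time in this series. A closing sketch of the end-to-end
# pattern, assuming the series is applied and an existing "documents"
# collection group; the partition count and names are illustrative.
import datetime

from google.cloud import firestore

client = firestore.Client()
group = client.collection_group("documents")

read_time = datetime.datetime.now(
    tz=datetime.timezone.utc
) - datetime.timedelta(minutes=10)

# Partition the group as of read_time, then stream each partition's query at
# the same timestamp so every worker observes one consistent snapshot.
for partition in group.get_partitions(partition_count=3, read_time=read_time):
    for snapshot in partition.query().stream(read_time=read_time):
        print(snapshot.id)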