From bfd1eb3ac32b766f15d2779114b551af50f88be6 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 22 Sep 2020 16:15:00 -0600 Subject: [PATCH 1/7] chore: add default CODEOWNERS (#34) --- .github/CODEOWNERS | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..30c3973 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + +# The @googleapis/yoshi-python is the default owner for changes in this repo +* @googleapis/yoshi-python + +# The python-samples-reviewers team is the default owner for samples changes +/samples/ @googleapis/python-samples-owners \ No newline at end of file From 66eecbbddf3ed0c6faaee6c778d561c0ba6e53e2 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 24 Nov 2020 14:45:24 -0800 Subject: [PATCH 2/7] chore: Updates to typing information #35 --- .../services/cloud_redis/async_client.py | 4 +- .../redis_v1/services/cloud_redis/client.py | 22 ++-- .../services/cloud_redis/async_client.py | 20 +-- .../services/cloud_redis/client.py | 38 +++--- synth.metadata | 117 +++++++++++++++++- tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- .../gapic/redis_v1beta1/test_cloud_redis.py | 4 +- 7 files changed, 161 insertions(+), 46 deletions(-) diff --git a/google/cloud/redis_v1/services/cloud_redis/async_client.py b/google/cloud/redis_v1/services/cloud_redis/async_client.py index 87e7a30..abe09f0 100644 --- a/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -29,8 +29,8 @@ from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis from google.protobuf import empty_pb2 as empty # type: ignore diff --git a/google/cloud/redis_v1/services/cloud_redis/client.py b/google/cloud/redis_v1/services/cloud_redis/client.py index b09d1ae..899e7ed 100644 --- a/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/google/cloud/redis_v1/services/cloud_redis/client.py @@ -20,10 +20,10 @@ from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -33,8 +33,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from 
google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis from google.protobuf import empty_pb2 as empty # type: ignore @@ -174,9 +174,9 @@ def parse_instance_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudRedisTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, CloudRedisTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the cloud redis client. @@ -190,8 +190,8 @@ def __init__( transport (Union[str, ~.CloudRedisTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -217,9 +217,9 @@ def __init__( creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py b/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py index 49b248a..72bb315 100644 --- a/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py +++ b/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py @@ -29,11 +29,11 @@ from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.redis_v1beta1.services.cloud_redis import pagers from google.cloud.redis_v1beta1.types import cloud_redis -from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import any_pb2 as gp_any # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore @@ -406,7 +406,7 @@ async def create_instance( response, self._client._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -511,7 +511,7 @@ async def update_instance( response, self._client._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -604,7 +604,7 @@ async def upgrade_instance( response, self._client._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. 
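The any_pb2-to-gp_any rename in the hunks above matters because binding the protobuf module to the name "any" shadows Python's built-in any(), which these generated clients also rely on (via builtins.any) when checking flattened arguments. A minimal sketch of the distinction, using only the standard protobuf import; the resource string is a placeholder:

    from google.protobuf import any_pb2 as gp_any

    # The aliased module still exposes the proto Any wrapper type ...
    metadata = gp_any.Any()

    # ... while the built-in any() stays usable for truthiness checks.
    has_flattened_params = any([None, "projects/p/locations/l/instances/i"])
    print(type(metadata).__name__, has_flattened_params)  # Any True
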
@@ -704,7 +704,7 @@ async def import_instance( response, self._client._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -800,7 +800,7 @@ async def export_instance( response, self._client._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -895,7 +895,7 @@ async def failover_instance( response, self._client._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -991,7 +991,7 @@ async def delete_instance( response, self._client._transport.operations_client, empty.Empty, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/client.py b/google/cloud/redis_v1beta1/services/cloud_redis/client.py index e9ef707..5b075bd 100644 --- a/google/cloud/redis_v1beta1/services/cloud_redis/client.py +++ b/google/cloud/redis_v1beta1/services/cloud_redis/client.py @@ -20,10 +20,10 @@ from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -33,11 +33,11 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.redis_v1beta1.services.cloud_redis import pagers from google.cloud.redis_v1beta1.types import cloud_redis -from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import any_pb2 as gp_any # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore @@ -175,9 +175,9 @@ def parse_instance_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudRedisTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, CloudRedisTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the cloud redis client. @@ -191,8 +191,8 @@ def __init__( transport (Union[str, ~.CloudRedisTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -218,9 +218,9 @@ def __init__( creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( @@ -571,7 +571,7 @@ def create_instance( response, self._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -678,7 +678,7 @@ def update_instance( response, self._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -773,7 +773,7 @@ def upgrade_instance( response, self._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -875,7 +875,7 @@ def import_instance( response, self._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -973,7 +973,7 @@ def export_instance( response, self._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -1070,7 +1070,7 @@ def failover_instance( response, self._transport.operations_client, cloud_redis.Instance, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. @@ -1168,7 +1168,7 @@ def delete_instance( response, self._transport.operations_client, empty.Empty, - metadata_type=any.Any, + metadata_type=gp_any.Any, ) # Done; return the response. 
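With the typing changes above, client options are imported as client_options_lib and the constructor accepts Optional credentials, so a plain dict and an explicit ClientOptions instance are handled the same way. A rough usage sketch; the api_endpoint value is illustrative only, and Application Default Credentials are assumed to be available:

    from google.api_core import client_options as client_options_lib
    from google.cloud import redis_v1

    # A dict is coerced with client_options_lib.from_dict(...) inside __init__.
    client = redis_v1.CloudRedisClient(
        client_options={"api_endpoint": "redis.googleapis.com"}
    )

    # An explicit ClientOptions instance works the same way.
    options = client_options_lib.ClientOptions(api_endpoint="redis.googleapis.com")
    client = redis_v1.CloudRedisClient(client_options=options)
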
diff --git a/synth.metadata b/synth.metadata index 92c1159..932165a 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-redis.git", - "sha": "0b3f2c075728a6ec4d5d503d010de229ed1ef725" + "sha": "bfd1eb3ac32b766f15d2779114b551af50f88be6" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "3dbeac0d54125b123c8dfd39c774b37473c36944", + "internalRef": "333159182" } }, { @@ -34,5 +42,112 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "docs/multiprocessing.rst", + "docs/redis_v1/services.rst", + "docs/redis_v1/types.rst", + "docs/redis_v1beta1/services.rst", + "docs/redis_v1beta1/types.rst", + "google/cloud/redis/__init__.py", + "google/cloud/redis/py.typed", + "google/cloud/redis_v1/__init__.py", + "google/cloud/redis_v1/proto/cloud_redis.proto", + "google/cloud/redis_v1/py.typed", + "google/cloud/redis_v1/services/__init__.py", + "google/cloud/redis_v1/services/cloud_redis/__init__.py", + "google/cloud/redis_v1/services/cloud_redis/async_client.py", + "google/cloud/redis_v1/services/cloud_redis/client.py", + "google/cloud/redis_v1/services/cloud_redis/pagers.py", + "google/cloud/redis_v1/services/cloud_redis/transports/__init__.py", + "google/cloud/redis_v1/services/cloud_redis/transports/base.py", + "google/cloud/redis_v1/services/cloud_redis/transports/grpc.py", + "google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py", + "google/cloud/redis_v1/types/__init__.py", + "google/cloud/redis_v1/types/cloud_redis.py", + "google/cloud/redis_v1beta1/__init__.py", + "google/cloud/redis_v1beta1/proto/cloud_redis.proto", + "google/cloud/redis_v1beta1/py.typed", + "google/cloud/redis_v1beta1/services/__init__.py", + 
"google/cloud/redis_v1beta1/services/cloud_redis/__init__.py", + "google/cloud/redis_v1beta1/services/cloud_redis/async_client.py", + "google/cloud/redis_v1beta1/services/cloud_redis/client.py", + "google/cloud/redis_v1beta1/services/cloud_redis/pagers.py", + "google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py", + "google/cloud/redis_v1beta1/services/cloud_redis/transports/base.py", + "google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py", + "google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py", + "google/cloud/redis_v1beta1/types/__init__.py", + "google/cloud/redis_v1beta1/types/cloud_redis.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "scripts/decrypt-secrets.sh", + "scripts/fixup_redis_v1_keywords.py", + "scripts/fixup_redis_v1beta1_keywords.py", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/redis_v1/__init__.py", + "tests/unit/gapic/redis_v1/test_cloud_redis.py", + "tests/unit/gapic/redis_v1beta1/__init__.py", + "tests/unit/gapic/redis_v1beta1/test_cloud_redis.py" ] } \ No newline at end of file diff --git a/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/unit/gapic/redis_v1/test_cloud_redis.py index ad8595e..449fb13 100644 --- a/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -31,7 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation_async +from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError diff --git a/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py b/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py index b391a5e..1b1a26b 100644 --- a/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py +++ b/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py @@ -31,7 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation_async +from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError @@ -42,7 +42,7 @@ from google.cloud.redis_v1beta1.types import cloud_redis from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import any_pb2 as gp_any # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore From add073580f4734289adc03abe8a44657c8686951 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 22 Dec 2020 15:29:17 -0800 Subject: [PATCH 3/7] test: add 3.9 unit tests --- noxfile.py | 2 +- synth.metadata | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noxfile.py b/noxfile.py index f04f906..379afd7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -28,7 +28,7 @@ 
DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/synth.metadata b/synth.metadata index 932165a..ce9a452 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-redis.git", - "sha": "bfd1eb3ac32b766f15d2779114b551af50f88be6" + "sha": "66eecbbddf3ed0c6faaee6c778d561c0ba6e53e2" } }, { From f3f1a86a2f14ceeaf22362387b397d9b3f880684 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Dec 2020 13:32:02 -0800 Subject: [PATCH 4/7] feat: add common resource helpers; expose client transport; remove send/recv gRPC limits (#38) --- docs/redis_v1/types.rst | 1 + docs/redis_v1beta1/types.rst | 1 + .../services/cloud_redis/async_client.py | 59 ++- .../redis_v1/services/cloud_redis/client.py | 76 +++- .../cloud_redis/transports/__init__.py | 1 - .../services/cloud_redis/transports/grpc.py | 37 +- .../cloud_redis/transports/grpc_asyncio.py | 19 +- google/cloud/redis_v1/types/__init__.py | 1 - google/cloud/redis_v1/types/cloud_redis.py | 14 +- .../services/cloud_redis/async_client.py | 59 ++- .../services/cloud_redis/client.py | 76 +++- .../cloud_redis/transports/__init__.py | 1 - .../services/cloud_redis/transports/grpc.py | 37 +- .../cloud_redis/transports/grpc_asyncio.py | 19 +- google/cloud/redis_v1beta1/types/__init__.py | 1 - .../cloud/redis_v1beta1/types/cloud_redis.py | 14 +- scripts/fixup_redis_v1_keywords.py | 1 + scripts/fixup_redis_v1beta1_keywords.py | 1 + synth.metadata | 6 +- tests/unit/gapic/redis_v1/test_cloud_redis.py | 423 +++++++++++------- .../gapic/redis_v1beta1/test_cloud_redis.py | 423 +++++++++++------- 21 files changed, 883 insertions(+), 387 deletions(-) diff --git a/docs/redis_v1/types.rst b/docs/redis_v1/types.rst index babee02..7eb7c77 100644 --- a/docs/redis_v1/types.rst +++ b/docs/redis_v1/types.rst @@ -3,3 +3,4 @@ Types for Google Cloud Redis v1 API .. automodule:: google.cloud.redis_v1.types :members: + :show-inheritance: diff --git a/docs/redis_v1beta1/types.rst b/docs/redis_v1beta1/types.rst index e552277..4306941 100644 --- a/docs/redis_v1beta1/types.rst +++ b/docs/redis_v1beta1/types.rst @@ -3,3 +3,4 @@ Types for Google Cloud Redis v1beta1 API .. 
automodule:: google.cloud.redis_v1beta1.types :members: + :show-inheritance: diff --git a/google/cloud/redis_v1/services/cloud_redis/async_client.py b/google/cloud/redis_v1/services/cloud_redis/async_client.py index abe09f0..1553c6c 100644 --- a/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -74,9 +74,41 @@ class CloudRedisAsyncClient: instance_path = staticmethod(CloudRedisClient.instance_path) parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path) + common_billing_account_path = staticmethod( + CloudRedisClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudRedisClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(CloudRedisClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudRedisClient.parse_common_folder_path) + + common_organization_path = staticmethod(CloudRedisClient.common_organization_path) + parse_common_organization_path = staticmethod( + CloudRedisClient.parse_common_organization_path + ) + + common_project_path = staticmethod(CloudRedisClient.common_project_path) + parse_common_project_path = staticmethod(CloudRedisClient.parse_common_project_path) + + common_location_path = staticmethod(CloudRedisClient.common_location_path) + parse_common_location_path = staticmethod( + CloudRedisClient.parse_common_location_path + ) + from_service_account_file = CloudRedisClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudRedisTransport: + """Return the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(CloudRedisClient).get_transport_class, type(CloudRedisClient) ) @@ -180,7 +212,8 @@ async def list_instances( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([parent]): + has_flattened_params = builtins.any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -256,7 +289,8 @@ async def get_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name]): + has_flattened_params = builtins.any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -365,7 +399,8 @@ async def create_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([parent, instance_id, instance]): + has_flattened_params = builtins.any([parent, instance_id, instance]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -470,7 +505,8 @@ async def update_instance( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([update_mask, instance]): + has_flattened_params = builtins.any([update_mask, instance]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -565,7 +601,8 @@ async def upgrade_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, redis_version]): + has_flattened_params = builtins.any([name, redis_version]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -665,7 +702,8 @@ async def import_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, input_config]): + has_flattened_params = builtins.any([name, input_config]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -761,7 +799,8 @@ async def export_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, output_config]): + has_flattened_params = builtins.any([name, output_config]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -856,7 +895,8 @@ async def failover_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, data_protection_mode]): + has_flattened_params = builtins.any([name, data_protection_mode]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -954,7 +994,8 @@ async def delete_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name]): + has_flattened_params = builtins.any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." diff --git a/google/cloud/redis_v1/services/cloud_redis/client.py b/google/cloud/redis_v1/services/cloud_redis/client.py index 899e7ed..8a5206f 100644 --- a/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/google/cloud/redis_v1/services/cloud_redis/client.py @@ -155,6 +155,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudRedisTransport: + """Return the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod + def instance_path(project: str, location: str, instance: str,) -> str: """Return a fully-qualified instance string.""" @@ -171,6 +180,65 @@ def parse_instance_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, @@ -206,10 +274,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library.
Raises: diff --git a/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 3b0088e..7959435 100644 --- a/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport - __all__ = ( "CloudRedisTransport", "CloudRedisGrpcTransport", diff --git a/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index b54f906..ae96591 100644 --- a/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -111,10 +111,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -123,6 +123,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -130,6 +132,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -165,7 +168,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -182,9 +190,14 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] + self._operations_client = None # Run the base constructor. super().__init__( @@ -208,7 +221,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -243,12 +256,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Return the channel from cache. 
return self._grpc_channel @property @@ -259,13 +268,11 @@ def operations_client(self) -> operations_v1.OperationsClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsClient( - self.grpc_channel - ) + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) # Return the client from cache. - return self.__dict__["operations_client"] + return self._operations_client @property def list_instances( diff --git a/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 30cdc40..b667778 100644 --- a/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -168,6 +168,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -175,6 +177,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -210,7 +213,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -227,6 +235,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. @@ -240,6 +252,7 @@ def __init__( ) self._stubs = {} + self._operations_client = None @property def grpc_channel(self) -> aio.Channel: @@ -259,13 +272,13 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient( + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. 
- return self.__dict__["operations_client"] + return self._operations_client @property def list_instances( diff --git a/google/cloud/redis_v1/types/__init__.py b/google/cloud/redis_v1/types/__init__.py index fb1f7e5..1942d36 100644 --- a/google/cloud/redis_v1/types/__init__.py +++ b/google/cloud/redis_v1/types/__init__.py @@ -36,7 +36,6 @@ ZoneMetadata, ) - __all__ = ( "Instance", "ListInstancesRequest", diff --git a/google/cloud/redis_v1/types/cloud_redis.py b/google/cloud/redis_v1/types/cloud_redis.py index a95e46b..9821f43 100644 --- a/google/cloud/redis_v1/types/cloud_redis.py +++ b/google/cloud/redis_v1/types/cloud_redis.py @@ -292,7 +292,7 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances = proto.RepeatedField(proto.MESSAGE, number=1, message=Instance,) + instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) next_page_token = proto.Field(proto.STRING, number=2) @@ -340,7 +340,7 @@ class CreateInstanceRequest(proto.Message): instance_id = proto.Field(proto.STRING, number=2) - instance = proto.Field(proto.MESSAGE, number=3, message=Instance,) + instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) class UpdateInstanceRequest(proto.Message): @@ -365,7 +365,7 @@ class UpdateInstanceRequest(proto.Message): update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - instance = proto.Field(proto.MESSAGE, number=2, message=Instance,) + instance = proto.Field(proto.MESSAGE, number=2, message="Instance",) class UpgradeInstanceRequest(proto.Message): @@ -423,7 +423,7 @@ class InputConfig(proto.Message): """ gcs_source = proto.Field( - proto.MESSAGE, number=1, oneof="source", message=GcsSource, + proto.MESSAGE, number=1, oneof="source", message="GcsSource", ) @@ -442,7 +442,7 @@ class ImportInstanceRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - input_config = proto.Field(proto.MESSAGE, number=3, message=InputConfig,) + input_config = proto.Field(proto.MESSAGE, number=3, message="InputConfig",) class GcsDestination(proto.Message): @@ -468,7 +468,7 @@ class OutputConfig(proto.Message): """ gcs_destination = proto.Field( - proto.MESSAGE, number=1, oneof="destination", message=GcsDestination, + proto.MESSAGE, number=1, oneof="destination", message="GcsDestination", ) @@ -487,7 +487,7 @@ class ExportInstanceRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - output_config = proto.Field(proto.MESSAGE, number=3, message=OutputConfig,) + output_config = proto.Field(proto.MESSAGE, number=3, message="OutputConfig",) class FailoverInstanceRequest(proto.Message): diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py b/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py index 72bb315..fef3d14 100644 --- a/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py +++ b/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py @@ -75,9 +75,41 @@ class CloudRedisAsyncClient: instance_path = staticmethod(CloudRedisClient.instance_path) parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path) + common_billing_account_path = staticmethod( + CloudRedisClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudRedisClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(CloudRedisClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudRedisClient.parse_common_folder_path) + + common_organization_path = 
staticmethod(CloudRedisClient.common_organization_path) + parse_common_organization_path = staticmethod( + CloudRedisClient.parse_common_organization_path + ) + + common_project_path = staticmethod(CloudRedisClient.common_project_path) + parse_common_project_path = staticmethod(CloudRedisClient.parse_common_project_path) + + common_location_path = staticmethod(CloudRedisClient.common_location_path) + parse_common_location_path = staticmethod( + CloudRedisClient.parse_common_location_path + ) + from_service_account_file = CloudRedisClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudRedisTransport: + """Return the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(CloudRedisClient).get_transport_class, type(CloudRedisClient) ) @@ -181,7 +213,8 @@ async def list_instances( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([parent]): + has_flattened_params = builtins.any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -257,7 +290,8 @@ async def get_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name]): + has_flattened_params = builtins.any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -366,7 +400,8 @@ async def create_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([parent, instance_id, instance]): + has_flattened_params = builtins.any([parent, instance_id, instance]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -471,7 +506,8 @@ async def update_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([update_mask, instance]): + has_flattened_params = builtins.any([update_mask, instance]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -566,7 +602,8 @@ async def upgrade_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, redis_version]): + has_flattened_params = builtins.any([name, redis_version]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
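The common_*_path helpers surfaced on the async client above are plain staticmethods, so they can be used without instantiating a client. A short sketch with placeholder project and location values:

    from google.cloud import redis_v1beta1

    path = redis_v1beta1.CloudRedisClient.common_location_path(
        "my-project", "us-central1"
    )
    # 'projects/my-project/locations/us-central1'

    segments = redis_v1beta1.CloudRedisClient.parse_common_location_path(path)
    # {'project': 'my-project', 'location': 'us-central1'}

The async surface re-exports the same helpers (for example CloudRedisAsyncClient.common_location_path), so resource names can be built and parsed consistently with either client.
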
@@ -666,7 +703,8 @@ async def import_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, input_config]): + has_flattened_params = builtins.any([name, input_config]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -762,7 +800,8 @@ async def export_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, output_config]): + has_flattened_params = builtins.any([name, output_config]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -857,7 +896,8 @@ async def failover_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name, data_protection_mode]): + has_flattened_params = builtins.any([name, data_protection_mode]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -955,7 +995,8 @@ async def delete_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and builtins.any([name]): + has_flattened_params = builtins.any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/client.py b/google/cloud/redis_v1beta1/services/cloud_redis/client.py index 5b075bd..ed583d8 100644 --- a/google/cloud/redis_v1beta1/services/cloud_redis/client.py +++ b/google/cloud/redis_v1beta1/services/cloud_redis/client.py @@ -156,6 +156,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudRedisTransport: + """Return the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod + def instance_path(project: str, location: str, instance: str,) -> str: """Return a fully-qualified instance string.""" @@ -172,6 +181,65 @@ def parse_instance_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, @@ -207,10 +275,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library.
Raises: diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py b/google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py index 3b0088e..7959435 100644 --- a/google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py +++ b/google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport - __all__ = ( "CloudRedisTransport", "CloudRedisGrpcTransport", diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py index b450f6e..1354c3d 100644 --- a/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py +++ b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py @@ -111,10 +111,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -123,6 +123,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -130,6 +132,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -165,7 +168,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -182,9 +190,14 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] + self._operations_client = None # Run the base constructor. super().__init__( @@ -208,7 +221,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -243,12 +256,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. 
""" - # Return the channel from cache. return self._grpc_channel @property @@ -259,13 +268,11 @@ def operations_client(self) -> operations_v1.OperationsClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsClient( - self.grpc_channel - ) + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) # Return the client from cache. - return self.__dict__["operations_client"] + return self._operations_client @property def list_instances( diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py index 239d7d7..b091718 100644 --- a/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py +++ b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py @@ -168,6 +168,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -175,6 +177,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -210,7 +213,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -227,6 +235,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. @@ -240,6 +252,7 @@ def __init__( ) self._stubs = {} + self._operations_client = None @property def grpc_channel(self) -> aio.Channel: @@ -259,13 +272,13 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient( + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. 
- return self.__dict__["operations_client"] + return self._operations_client @property def list_instances( diff --git a/google/cloud/redis_v1beta1/types/__init__.py b/google/cloud/redis_v1beta1/types/__init__.py index 3d4931d..d2450f1 100644 --- a/google/cloud/redis_v1beta1/types/__init__.py +++ b/google/cloud/redis_v1beta1/types/__init__.py @@ -35,7 +35,6 @@ ZoneMetadata, ) - __all__ = ( "Instance", "ListInstancesRequest", diff --git a/google/cloud/redis_v1beta1/types/cloud_redis.py b/google/cloud/redis_v1beta1/types/cloud_redis.py index 80ec425..5b7c71f 100644 --- a/google/cloud/redis_v1beta1/types/cloud_redis.py +++ b/google/cloud/redis_v1beta1/types/cloud_redis.py @@ -293,7 +293,7 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances = proto.RepeatedField(proto.MESSAGE, number=1, message=Instance,) + instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) next_page_token = proto.Field(proto.STRING, number=2) @@ -341,7 +341,7 @@ class CreateInstanceRequest(proto.Message): instance_id = proto.Field(proto.STRING, number=2) - instance = proto.Field(proto.MESSAGE, number=3, message=Instance,) + instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) class UpdateInstanceRequest(proto.Message): @@ -366,7 +366,7 @@ class UpdateInstanceRequest(proto.Message): update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - instance = proto.Field(proto.MESSAGE, number=2, message=Instance,) + instance = proto.Field(proto.MESSAGE, number=2, message="Instance",) class UpgradeInstanceRequest(proto.Message): @@ -424,7 +424,7 @@ class InputConfig(proto.Message): """ gcs_source = proto.Field( - proto.MESSAGE, number=1, oneof="source", message=GcsSource, + proto.MESSAGE, number=1, oneof="source", message="GcsSource", ) @@ -443,7 +443,7 @@ class ImportInstanceRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - input_config = proto.Field(proto.MESSAGE, number=3, message=InputConfig,) + input_config = proto.Field(proto.MESSAGE, number=3, message="InputConfig",) class GcsDestination(proto.Message): @@ -469,7 +469,7 @@ class OutputConfig(proto.Message): """ gcs_destination = proto.Field( - proto.MESSAGE, number=1, oneof="destination", message=GcsDestination, + proto.MESSAGE, number=1, oneof="destination", message="GcsDestination", ) @@ -488,7 +488,7 @@ class ExportInstanceRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - output_config = proto.Field(proto.MESSAGE, number=3, message=OutputConfig,) + output_config = proto.Field(proto.MESSAGE, number=3, message="OutputConfig",) class FailoverInstanceRequest(proto.Message): diff --git a/scripts/fixup_redis_v1_keywords.py b/scripts/fixup_redis_v1_keywords.py index 5d1dd0c..b142431 100644 --- a/scripts/fixup_redis_v1_keywords.py +++ b/scripts/fixup_redis_v1_keywords.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC diff --git a/scripts/fixup_redis_v1beta1_keywords.py b/scripts/fixup_redis_v1beta1_keywords.py index 5d1dd0c..b142431 100644 --- a/scripts/fixup_redis_v1beta1_keywords.py +++ b/scripts/fixup_redis_v1beta1_keywords.py @@ -1,3 +1,4 @@ +#! 
/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC diff --git a/synth.metadata b/synth.metadata index ce9a452..e0ca973 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-redis.git", - "sha": "66eecbbddf3ed0c6faaee6c778d561c0ba6e53e2" + "sha": "add073580f4734289adc03abe8a44657c8686951" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "3dbeac0d54125b123c8dfd39c774b37473c36944", - "internalRef": "333159182" + "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", + "internalRef": "347055288" } }, { diff --git a/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/unit/gapic/redis_v1/test_cloud_redis.py index 449fb13..7712657 100644 --- a/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -95,12 +95,12 @@ def test_cloud_redis_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "redis.googleapis.com:443" + assert client.transport._host == "redis.googleapis.com:443" def test_cloud_redis_client_get_transport_class(): @@ -444,7 +444,7 @@ def test_list_instances( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], @@ -459,6 +459,7 @@ def test_list_instances( assert args[0] == cloud_redis.ListInstancesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) assert response.next_page_token == "next_page_token_value" @@ -471,19 +472,19 @@ def test_list_instances_from_dict(): @pytest.mark.asyncio -async def test_list_instances_async(transport: str = "grpc_asyncio"): +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ListInstancesRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.ListInstancesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_instances), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.ListInstancesResponse( @@ -498,7 +499,7 @@ async def test_list_instances_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) @@ -508,6 +509,11 @@ async def test_list_instances_async(transport: str = "grpc_asyncio"): assert response.unreachable == ["unreachable_value"] +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + def test_list_instances_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -517,7 +523,7 @@ def test_list_instances_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_redis.ListInstancesResponse() client.list_instances(request) @@ -542,9 +548,7 @@ async def test_list_instances_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_instances), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.ListInstancesResponse() ) @@ -565,7 +569,7 @@ def test_list_instances_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() @@ -597,9 +601,7 @@ async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_instances), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() @@ -634,7 +636,7 @@ def test_list_instances_pager(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -672,7 +674,7 @@ def test_list_instances_pages(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -703,9 +705,7 @@ async def test_list_instances_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_instances), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -742,9 +742,7 @@ async def test_list_instances_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_instances), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -784,7 +782,7 @@ def test_get_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.Instance( name="name_value", @@ -814,6 +812,7 @@ def test_get_instance( assert args[0] == cloud_redis.GetInstanceRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis.Instance) assert response.name == "name_value" @@ -854,19 +853,19 @@ def test_get_instance_from_dict(): @pytest.mark.asyncio -async def test_get_instance_async(transport: str = "grpc_asyncio"): +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.GetInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.GetInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.Instance( @@ -895,7 +894,7 @@ async def test_get_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.GetInstanceRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) @@ -933,6 +932,11 @@ async def test_get_instance_async(transport: str = "grpc_asyncio"): assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + def test_get_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -942,7 +946,7 @@ def test_get_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_redis.Instance() client.get_instance(request) @@ -967,9 +971,7 @@ async def test_get_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.Instance() ) @@ -990,7 +992,7 @@ def test_get_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.Instance() @@ -1022,9 +1024,7 @@ async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.Instance() @@ -1067,7 +1067,7 @@ def test_create_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1088,19 +1088,19 @@ def test_create_instance_from_dict(): @pytest.mark.asyncio -async def test_create_instance_async(transport: str = "grpc_asyncio"): +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.CreateInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.CreateInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1112,12 +1112,17 @@ async def test_create_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.CreateInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + def test_create_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1127,7 +1132,7 @@ def test_create_instance_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_instance(request) @@ -1152,9 +1157,7 @@ async def test_create_instance_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1175,7 +1178,7 @@ def test_create_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1218,9 +1221,7 @@ async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1274,7 +1275,7 @@ def test_update_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1295,19 +1296,19 @@ def test_update_instance_from_dict(): @pytest.mark.asyncio -async def test_update_instance_async(transport: str = "grpc_asyncio"): +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.UpdateInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.UpdateInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1319,12 +1320,17 @@ async def test_update_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.UpdateInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + def test_update_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1334,7 +1340,7 @@ def test_update_instance_field_headers(): request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_instance(request) @@ -1361,9 +1367,7 @@ async def test_update_instance_field_headers_async(): request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1386,7 +1390,7 @@ def test_update_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1425,9 +1429,7 @@ async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.update_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1477,9 +1479,7 @@ def test_upgrade_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1500,19 +1500,19 @@ def test_upgrade_instance_from_dict(): @pytest.mark.asyncio -async def test_upgrade_instance_async(transport: str = "grpc_asyncio"): +async def test_upgrade_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.UpgradeInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.UpgradeInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1524,12 +1524,17 @@ async def test_upgrade_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.UpgradeInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_upgrade_instance_async_from_dict(): + await test_upgrade_instance_async(request_type=dict) + + def test_upgrade_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1539,9 +1544,7 @@ def test_upgrade_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.upgrade_instance(request) @@ -1566,9 +1569,7 @@ async def test_upgrade_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1589,9 +1590,7 @@ def test_upgrade_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1629,9 +1628,7 @@ async def test_upgrade_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1680,7 +1677,7 @@ def test_import_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_instance), "__call__") as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1701,19 +1698,19 @@ def test_import_instance_from_dict(): @pytest.mark.asyncio -async def test_import_instance_async(transport: str = "grpc_asyncio"): +async def test_import_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ImportInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.ImportInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.import_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1725,12 +1722,17 @@ async def test_import_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.ImportInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_import_instance_async_from_dict(): + await test_import_instance_async(request_type=dict) + + def test_import_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1740,7 +1742,7 @@ def test_import_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_instance), "__call__") as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.import_instance(request) @@ -1765,9 +1767,7 @@ async def test_import_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.import_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1788,7 +1788,7 @@ def test_import_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_instance), "__call__") as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1833,9 +1833,7 @@ async def test_import_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.import_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1891,7 +1889,7 @@ def test_export_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.export_instance), "__call__") as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1912,19 +1910,19 @@ def test_export_instance_from_dict(): @pytest.mark.asyncio -async def test_export_instance_async(transport: str = "grpc_asyncio"): +async def test_export_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ExportInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.ExportInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.export_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1936,12 +1934,17 @@ async def test_export_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.ExportInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_export_instance_async_from_dict(): + await test_export_instance_async(request_type=dict) + + def test_export_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1951,7 +1954,7 @@ def test_export_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.export_instance), "__call__") as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.export_instance(request) @@ -1976,9 +1979,7 @@ async def test_export_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.export_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1999,7 +2000,7 @@ def test_export_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.export_instance), "__call__") as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2044,9 +2045,7 @@ async def test_export_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.export_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2103,7 +2102,7 @@ def test_failover_instance( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -2125,18 +2124,20 @@ def test_failover_instance_from_dict(): @pytest.mark.asyncio -async def test_failover_instance_async(transport: str = "grpc_asyncio"): +async def test_failover_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.FailoverInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.FailoverInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2149,12 +2150,17 @@ async def test_failover_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.FailoverInstanceRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_failover_instance_async_from_dict(): + await test_failover_instance_async(request_type=dict) + + def test_failover_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -2165,7 +2171,7 @@ def test_failover_instance_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") @@ -2192,7 +2198,7 @@ async def test_failover_instance_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") @@ -2215,7 +2221,7 @@ def test_failover_instance_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2259,7 +2265,7 @@ async def test_failover_instance_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2313,7 +2319,7 @@ def test_delete_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -2334,19 +2340,19 @@ def test_delete_instance_from_dict(): @pytest.mark.asyncio -async def test_delete_instance_async(transport: str = "grpc_asyncio"): +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.DeleteInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.DeleteInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -2358,12 +2364,17 @@ async def test_delete_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.DeleteInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + def test_delete_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -2373,7 +2384,7 @@ def test_delete_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.delete_instance(request) @@ -2398,9 +2409,7 @@ async def test_delete_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -2421,7 +2430,7 @@ def test_delete_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2453,9 +2462,7 @@ async def test_delete_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2522,7 +2529,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = CloudRedisClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -2555,7 +2562,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.CloudRedisGrpcTransport,) + assert isinstance(client.transport, transports.CloudRedisGrpcTransport,) def test_cloud_redis_base_transport_error(): @@ -2662,7 +2669,7 @@ def test_cloud_redis_host_no_port(): api_endpoint="redis.googleapis.com" ), ) - assert client._transport._host == "redis.googleapis.com:443" + assert client.transport._host == "redis.googleapis.com:443" def test_cloud_redis_host_with_port(): @@ -2672,7 +2679,7 @@ def test_cloud_redis_host_with_port(): api_endpoint="redis.googleapis.com:8000" ), ) - assert client._transport._host == "redis.googleapis.com:8000" + assert client.transport._host == "redis.googleapis.com:8000" def test_cloud_redis_grpc_transport_channel(): @@ -2684,6 +2691,7 @@ def test_cloud_redis_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_cloud_redis_grpc_asyncio_transport_channel(): @@ -2695,6 +2703,7 @@ def test_cloud_redis_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( @@ -2735,8 +2744,13 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source(transport_cl scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -2772,6 +2786,10 @@ def test_cloud_redis_transport_channel_mtls_with_adc(transport_class): scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel @@ -2780,7 +2798,7 @@ def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( credentials=credentials.AnonymousCredentials(), transport="grpc", ) - transport = client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsClient,) @@ -2793,7 +2811,7 @@ def test_cloud_redis_grpc_lro_async_client(): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) - transport = client._client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) @@ -2827,6 +2845,107 @@ def test_parse_instance_path(): assert expected == actual +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudRedisClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudRedisClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + + expected = "folders/{folder}".format(folder=folder,) + actual = CloudRedisClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudRedisClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = CloudRedisClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudRedisClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = CloudRedisClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudRedisClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = CloudRedisClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudRedisClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() diff --git a/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py b/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py index 1b1a26b..8254137 100644 --- a/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py +++ b/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py @@ -96,12 +96,12 @@ def test_cloud_redis_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "redis.googleapis.com:443" + assert client.transport._host == "redis.googleapis.com:443" def test_cloud_redis_client_get_transport_class(): @@ -445,7 +445,7 @@ def test_list_instances( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], @@ -460,6 +460,7 @@ def test_list_instances( assert args[0] == cloud_redis.ListInstancesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) assert response.next_page_token == "next_page_token_value" @@ -472,19 +473,19 @@ def test_list_instances_from_dict(): @pytest.mark.asyncio -async def test_list_instances_async(transport: str = "grpc_asyncio"): +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ListInstancesRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.ListInstancesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_instances), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.ListInstancesResponse( @@ -499,7 +500,7 @@ async def test_list_instances_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) @@ -509,6 +510,11 @@ async def test_list_instances_async(transport: str = "grpc_asyncio"): assert response.unreachable == ["unreachable_value"] +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + def test_list_instances_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -518,7 +524,7 @@ def test_list_instances_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_redis.ListInstancesResponse() client.list_instances(request) @@ -543,9 +549,7 @@ async def test_list_instances_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_instances), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.ListInstancesResponse() ) @@ -566,7 +570,7 @@ def test_list_instances_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() @@ -598,9 +602,7 @@ async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_instances), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() @@ -635,7 +637,7 @@ def test_list_instances_pager(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -673,7 +675,7 @@ def test_list_instances_pages(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -704,9 +706,7 @@ async def test_list_instances_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_instances), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -743,9 +743,7 @@ async def test_list_instances_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_instances), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -785,7 +783,7 @@ def test_get_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.Instance( name="name_value", @@ -815,6 +813,7 @@ def test_get_instance( assert args[0] == cloud_redis.GetInstanceRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) assert response.name == "name_value" @@ -855,19 +854,19 @@ def test_get_instance_from_dict(): @pytest.mark.asyncio -async def test_get_instance_async(transport: str = "grpc_asyncio"): +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.GetInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.GetInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.Instance( @@ -896,7 +895,7 @@ async def test_get_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.Instance) @@ -934,6 +933,11 @@ async def test_get_instance_async(transport: str = "grpc_asyncio"): assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + def test_get_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -943,7 +947,7 @@ def test_get_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_redis.Instance() client.get_instance(request) @@ -968,9 +972,7 @@ async def test_get_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_redis.Instance() ) @@ -991,7 +993,7 @@ def test_get_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.Instance() @@ -1023,9 +1025,7 @@ async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_redis.Instance() @@ -1068,7 +1068,7 @@ def test_create_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1089,19 +1089,19 @@ def test_create_instance_from_dict(): @pytest.mark.asyncio -async def test_create_instance_async(transport: str = "grpc_asyncio"): +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.CreateInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.CreateInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1113,12 +1113,17 @@ async def test_create_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.CreateInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + def test_create_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1128,7 +1133,7 @@ def test_create_instance_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_instance(request) @@ -1153,9 +1158,7 @@ async def test_create_instance_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1176,7 +1179,7 @@ def test_create_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -1219,9 +1222,7 @@ async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1275,7 +1276,7 @@ def test_update_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1296,19 +1297,19 @@ def test_update_instance_from_dict(): @pytest.mark.asyncio -async def test_update_instance_async(transport: str = "grpc_asyncio"): +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.UpdateInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.UpdateInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1320,12 +1321,17 @@ async def test_update_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.UpdateInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + def test_update_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1335,7 +1341,7 @@ def test_update_instance_field_headers(): request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_instance(request) @@ -1362,9 +1368,7 @@ async def test_update_instance_field_headers_async(): request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.update_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1387,7 +1391,7 @@ def test_update_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1426,9 +1430,7 @@ async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1478,9 +1480,7 @@ def test_upgrade_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1501,19 +1501,19 @@ def test_upgrade_instance_from_dict(): @pytest.mark.asyncio -async def test_upgrade_instance_async(transport: str = "grpc_asyncio"): +async def test_upgrade_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.UpgradeInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.UpgradeInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1525,12 +1525,17 @@ async def test_upgrade_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.UpgradeInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_upgrade_instance_async_from_dict(): + await test_upgrade_instance_async(request_type=dict) + + def test_upgrade_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1540,9 +1545,7 @@ def test_upgrade_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.upgrade_instance(request) @@ -1567,9 +1570,7 @@ async def test_upgrade_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1590,9 +1591,7 @@ def test_upgrade_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1630,9 +1629,7 @@ async def test_upgrade_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.upgrade_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1681,7 +1678,7 @@ def test_import_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_instance), "__call__") as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1702,19 +1699,19 @@ def test_import_instance_from_dict(): @pytest.mark.asyncio -async def test_import_instance_async(transport: str = "grpc_asyncio"): +async def test_import_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ImportInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.ImportInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.import_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1726,12 +1723,17 @@ async def test_import_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.ImportInstanceRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_import_instance_async_from_dict(): + await test_import_instance_async(request_type=dict) + + def test_import_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1741,7 +1743,7 @@ def test_import_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_instance), "__call__") as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.import_instance(request) @@ -1766,9 +1768,7 @@ async def test_import_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.import_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1789,7 +1789,7 @@ def test_import_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_instance), "__call__") as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1834,9 +1834,7 @@ async def test_import_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.import_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1892,7 +1890,7 @@ def test_export_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.export_instance), "__call__") as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1913,19 +1911,19 @@ def test_export_instance_from_dict(): @pytest.mark.asyncio -async def test_export_instance_async(transport: str = "grpc_asyncio"): +async def test_export_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ExportInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.ExportInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.export_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1937,12 +1935,17 @@ async def test_export_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.ExportInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_export_instance_async_from_dict(): + await test_export_instance_async(request_type=dict) + + def test_export_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -1952,7 +1955,7 @@ def test_export_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.export_instance), "__call__") as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.export_instance(request) @@ -1977,9 +1980,7 @@ async def test_export_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.export_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -2000,7 +2001,7 @@ def test_export_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.export_instance), "__call__") as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2045,9 +2046,7 @@ async def test_export_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.export_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2104,7 +2103,7 @@ def test_failover_instance( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") @@ -2126,18 +2125,20 @@ def test_failover_instance_from_dict(): @pytest.mark.asyncio -async def test_failover_instance_async(transport: str = "grpc_asyncio"): +async def test_failover_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.FailoverInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.FailoverInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2150,12 +2151,17 @@ async def test_failover_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.FailoverInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_failover_instance_async_from_dict(): + await test_failover_instance_async(request_type=dict) + + def test_failover_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -2166,7 +2172,7 @@ def test_failover_instance_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") @@ -2193,7 +2199,7 @@ async def test_failover_instance_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") @@ -2216,7 +2222,7 @@ def test_failover_instance_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2260,7 +2266,7 @@ async def test_failover_instance_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.failover_instance), "__call__" + type(client.transport.failover_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2314,7 +2320,7 @@ def test_delete_instance( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") @@ -2335,19 +2341,19 @@ def test_delete_instance_from_dict(): @pytest.mark.asyncio -async def test_delete_instance_async(transport: str = "grpc_asyncio"): +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.DeleteInstanceRequest +): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloud_redis.DeleteInstanceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -2359,12 +2365,17 @@ async def test_delete_instance_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloud_redis.DeleteInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + def test_delete_instance_field_headers(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) @@ -2374,7 +2385,7 @@ def test_delete_instance_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.delete_instance(request) @@ -2399,9 +2410,7 @@ async def test_delete_instance_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -2422,7 +2431,7 @@ def test_delete_instance_flattened(): client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2454,9 +2463,7 @@ async def test_delete_instance_flattened_async(): client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -2523,7 +2530,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = CloudRedisClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -2556,7 +2563,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.CloudRedisGrpcTransport,) + assert isinstance(client.transport, transports.CloudRedisGrpcTransport,) def test_cloud_redis_base_transport_error(): @@ -2663,7 +2670,7 @@ def test_cloud_redis_host_no_port(): api_endpoint="redis.googleapis.com" ), ) - assert client._transport._host == "redis.googleapis.com:443" + assert client.transport._host == "redis.googleapis.com:443" def test_cloud_redis_host_with_port(): @@ -2673,7 +2680,7 @@ def test_cloud_redis_host_with_port(): api_endpoint="redis.googleapis.com:8000" ), ) - assert client._transport._host == "redis.googleapis.com:8000" + assert client.transport._host == "redis.googleapis.com:8000" def test_cloud_redis_grpc_transport_channel(): @@ -2685,6 +2692,7 @@ def test_cloud_redis_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_cloud_redis_grpc_asyncio_transport_channel(): @@ -2696,6 +2704,7 @@ def test_cloud_redis_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( @@ -2736,8 +2745,13 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source(transport_cl scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -2773,6 +2787,10 @@ def test_cloud_redis_transport_channel_mtls_with_adc(transport_class): scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel @@ -2781,7 +2799,7 @@ def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( credentials=credentials.AnonymousCredentials(), transport="grpc", ) - transport = client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsClient,) @@ -2794,7 +2812,7 @@ def test_cloud_redis_grpc_lro_async_client(): client = CloudRedisAsyncClient( credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) - transport = client._client._transport + transport = client.transport # Ensure that we have a api-core operations client. 
assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) @@ -2828,6 +2846,107 @@ def test_parse_instance_path(): assert expected == actual +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudRedisClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudRedisClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + + expected = "folders/{folder}".format(folder=folder,) + actual = CloudRedisClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudRedisClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = CloudRedisClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudRedisClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = CloudRedisClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudRedisClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = CloudRedisClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudRedisClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() From 639345127d342927a0db213396da7b4a82a235f5 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Dec 2020 14:00:23 -0800 Subject: [PATCH 5/7] chore: update templates (#39) --- .flake8 | 1 + .kokoro/docs/common.cfg | 2 +- .kokoro/populate-secrets.sh | 43 ++++++++++ .kokoro/release/common.cfg | 50 +++-------- .kokoro/samples/python3.6/common.cfg | 6 ++ .kokoro/samples/python3.7/common.cfg | 6 ++ .kokoro/samples/python3.8/common.cfg | 6 ++ .kokoro/test-samples.sh | 8 +- .kokoro/trampoline.sh | 15 ++-- .pre-commit-config.yaml | 17 ++++ CODE_OF_CONDUCT.md | 123 +++++++++++++++++++-------- CONTRIBUTING.rst | 40 ++++----- docs/conf.py | 8 +- noxfile.py | 9 +- synth.metadata | 4 +- 15 files changed, 225 insertions(+), 113 deletions(-) create mode 100755 .kokoro/populate-secrets.sh create mode 100644 .pre-commit-config.yaml diff --git a/.flake8 b/.flake8 index ed93163..29227d4 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. + **/.nox/** __pycache__, .git, *.pyc, diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 513dec2..6084b77 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 0000000..f525142 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? 
== 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 3acf74c..2883cc9 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-redis/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 5f7862a..2f0d48e 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-redis/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index c87d3ce..38d68b6 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-redis/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index c5eb911..c1a841b 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-redis/.kokoro/test-samples.sh" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index d2fa4a4..1dddce5 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then git checkout $LATEST_RELEASE fi +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 @@ -101,4 +107,4 @@ cd "$ROOT" # Workaround for Kokoro permissions issue: delete secrets rm testing/{test-env.sh,client-secrets.json,service-account.json} -exit "$RTN" \ No newline at end of file +exit "$RTN" diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index e8c4251..f39236e 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..a9024b1 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.4 + hooks: + - id: flake8 diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f60..039f436 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. 
+* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. 
They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 413b921..7d0f2f0 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** @@ -130,6 +111,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. 
code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for @@ -211,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-redis/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/docs/conf.py b/docs/conf.py index 458660c..0144b23 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -39,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -344,10 +345,11 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), - "grpc": ("https://grpc.io/grpc/python/", None), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/noxfile.py b/noxfile.py index 379afd7..a684a53 100644 --- a/noxfile.py +++ b/noxfile.py @@ -72,16 +72,17 @@ def default(session): # Install all test dependencies, then install this package in-place. session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov") + session.install( + "mock", "pytest", "pytest-cov", + ) session.install("-e", ".") # Run py.test against the unit tests. 
session.run( "py.test", "--quiet", - "--cov=google.cloud.redis", - "--cov=google.cloud", - "--cov=tests.unit", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", diff --git a/synth.metadata b/synth.metadata index e0ca973..a954d58 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "fdd03c161003ab97657cc0218f25c82c89ddf4b6" + "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" } } ], @@ -61,6 +61,7 @@ ".kokoro/docs/common.cfg", ".kokoro/docs/docs-presubmit.cfg", ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", ".kokoro/presubmit/common.cfg", ".kokoro/presubmit/presubmit.cfg", ".kokoro/publish-docs.sh", @@ -86,6 +87,7 @@ ".kokoro/test-samples.sh", ".kokoro/trampoline.sh", ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", ".trampolinerc", "CODE_OF_CONDUCT.md", "CONTRIBUTING.rst", From 682cc0d2a15ed59230f674f1407f59d86a39a696 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Fri, 29 Jan 2021 08:14:33 -0800 Subject: [PATCH 6/7] build: migrate to flakybot (#47) --- .kokoro/test-samples.sh | 8 ++++---- .kokoro/trampoline_v2.sh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 1dddce5..e1608c2 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot fi if [[ $EXIT -ne 0 ]]; then diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 719bcd5..4af6cdc 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) From ca435d8e872324e1dc77285033a95a792dcead31 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 1 Mar 2021 09:48:06 -0700 Subject: [PATCH 7/7] chore: release 2.1.0 (#40) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c0230c..83789e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-redis/#history +## [2.1.0](https://www.github.com/googleapis/python-redis/compare/v2.0.0...v2.1.0) (2021-01-29) + + +### Features + +* add common resource helpers; expose client transport; remove send/recv gRPC limits ([#38](https://www.github.com/googleapis/python-redis/issues/38)) ([f3f1a86](https://www.github.com/googleapis/python-redis/commit/f3f1a86a2f14ceeaf22362387b397d9b3f880684)) + ## [2.0.0](https://www.github.com/googleapis/python-redis/compare/v1.0.0...v2.0.0) (2020-09-14) diff --git a/setup.py b/setup.py index e5c5c23..bb46cf0 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-redis" description = "Google Cloud Memorystore for Redis API client library" -version = "2.0.0" +version = "2.1.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
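The 2.1.0 release summarized in the CHANGELOG above covers the API-surface changes visible in the earlier test updates: the transport is now reachable through a public `client.transport` property (replacing the private `_transport` attribute the older tests reached into), and `common_*_path` / `parse_common_*_path` resource helpers were added. A minimal usage sketch follows; it assumes `AnonymousCredentials` purely for illustration (as the unit tests do), uses made-up project/location values, and performs no live API calls:

from google.auth import credentials
from google.cloud import redis_v1

# Anonymous credentials, mirroring the unit tests above; nothing is sent to the API here.
client = redis_v1.CloudRedisClient(credentials=credentials.AnonymousCredentials())

# The transport is exposed as a public property rather than the private
# `_transport` attribute used before this release.
assert client.transport is not None

# Common resource path helpers build and parse well-known resource names.
# "my-project" and "us-central1" are placeholder values for illustration.
path = redis_v1.CloudRedisClient.common_location_path("my-project", "us-central1")
assert path == "projects/my-project/locations/us-central1"
assert redis_v1.CloudRedisClient.parse_common_location_path(path) == {
    "project": "my-project",
    "location": "us-central1",
}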