diff --git a/.librarian/state.yaml b/.librarian/state.yaml index 75a8e871005a..bf51e78f140b 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -573,7 +573,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-backupdr version: 0.6.0 - last_generated_commit: a17b84add8318f780fcc8a027815d5fee644b9f7 + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/cloud/backupdr/v1 service_config: backupdr_v1.yaml @@ -1201,7 +1201,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-config version: 0.3.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/cloud/config/v1 service_config: config_v1.yaml @@ -1344,7 +1344,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-databasecenter version: 0.2.0 - last_generated_commit: 9a477cd3c26a704130e2a2fb44a40281d9312e4c + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/cloud/databasecenter/v1beta service_config: databasecenter_v1beta.yaml @@ -1879,7 +1879,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-geminidataanalytics version: 0.8.0 - last_generated_commit: 9a477cd3c26a704130e2a2fb44a40281d9312e4c + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/cloud/geminidataanalytics/v1beta service_config: geminidataanalytics_v1beta.yaml @@ -2104,7 +2104,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-kms version: 3.8.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/cloud/kms/v1 service_config: cloudkms_v1.yaml @@ -2446,7 +2446,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-netapp version: 0.6.0 - last_generated_commit: c9ff4f1cd26f1fe63e6d1c11a198366b70ebdb84 + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/cloud/netapp/v1 service_config: netapp_v1.yaml @@ -2993,7 +2993,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-run version: 0.14.0 - last_generated_commit: effe5c4fa816021e724ca856d5640f2e55b14a8b + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/cloud/run/v2 service_config: run_v2.yaml @@ -4264,7 +4264,7 @@ libraries: tag_format: '{id}-v{version}' - id: googleapis-common-protos version: 1.72.0 - last_generated_commit: d4a34bf03d617723146fe3ae15192c4d93981a27 + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: google/api service_config: serviceconfig.yaml @@ -4291,7 +4291,7 @@ libraries: tag_format: '{id}-v{version}' - id: grafeas version: 1.17.0 - last_generated_commit: e8365a7f88fabe8717cb8322b8ce784b03b6daea + last_generated_commit: 94aa4f5ae672addf00e7970ecc47699e34989e90 apis: - path: grafeas/v1 service_config: grafeas_v1.yaml diff --git a/packages/google-cloud-backupdr/docs/backupdr_v1/backup_dr_protection_summary.rst b/packages/google-cloud-backupdr/docs/backupdr_v1/backup_dr_protection_summary.rst new file mode 100644 index 000000000000..d2f0266aaa72 --- /dev/null +++ b/packages/google-cloud-backupdr/docs/backupdr_v1/backup_dr_protection_summary.rst @@ -0,0 +1,10 @@ +BackupDrProtectionSummary +------------------------------------------- + +.. automodule:: google.cloud.backupdr_v1.services.backup_dr_protection_summary + :members: + :inherited-members: + +.. 
automodule:: google.cloud.backupdr_v1.services.backup_dr_protection_summary.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-backupdr/docs/backupdr_v1/services_.rst b/packages/google-cloud-backupdr/docs/backupdr_v1/services_.rst index 512dd68a7dda..d8c1fb245953 100644 --- a/packages/google-cloud-backupdr/docs/backupdr_v1/services_.rst +++ b/packages/google-cloud-backupdr/docs/backupdr_v1/services_.rst @@ -4,3 +4,4 @@ Services for Google Cloud Backupdr v1 API :maxdepth: 2 backup_dr + backup_dr_protection_summary diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index 2daf0985f7e2..76975c5b1a26 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -20,6 +20,12 @@ from google.cloud.backupdr_v1.services.backup_dr.async_client import BackupDRAsyncClient from google.cloud.backupdr_v1.services.backup_dr.client import BackupDRClient +from google.cloud.backupdr_v1.services.backup_dr_protection_summary.async_client import ( + BackupDrProtectionSummaryAsyncClient, +) +from google.cloud.backupdr_v1.services.backup_dr_protection_summary.client import ( + BackupDrProtectionSummaryClient, +) from google.cloud.backupdr_v1.types.backupdr import ( CreateManagementServerRequest, DeleteManagementServerRequest, @@ -158,10 +164,23 @@ ListDataSourceReferencesRequest, ListDataSourceReferencesResponse, ) +from google.cloud.backupdr_v1.types.protection_summary import ( + BackupConfigDetails, + BackupDrPlanConfig, + BackupDrPlanRule, + BackupDrTemplateConfig, + BackupLocation, + ListResourceBackupConfigsRequest, + ListResourceBackupConfigsResponse, + PitrSettings, + ResourceBackupConfig, +) __all__ = ( "BackupDRClient", "BackupDRAsyncClient", + "BackupDrProtectionSummaryClient", + "BackupDrProtectionSummaryAsyncClient", "CreateManagementServerRequest", "DeleteManagementServerRequest", "GetManagementServerRequest", @@ -282,4 +301,13 @@ "GetDataSourceReferenceRequest", "ListDataSourceReferencesRequest", "ListDataSourceReferencesResponse", + "BackupConfigDetails", + "BackupDrPlanConfig", + "BackupDrPlanRule", + "BackupDrTemplateConfig", + "BackupLocation", + "ListResourceBackupConfigsRequest", + "ListResourceBackupConfigsResponse", + "PitrSettings", + "ResourceBackupConfig", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index e09e5c45d7d6..a2752e102651 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -29,6 +29,10 @@ import importlib_metadata as metadata from .services.backup_dr import BackupDRAsyncClient, BackupDRClient +from .services.backup_dr_protection_summary import ( + BackupDrProtectionSummaryAsyncClient, + BackupDrProtectionSummaryClient, +) from .types.backupdr import ( CreateManagementServerRequest, DeleteManagementServerRequest, @@ -165,6 +169,17 @@ ListDataSourceReferencesRequest, ListDataSourceReferencesResponse, ) +from .types.protection_summary import ( + BackupConfigDetails, + BackupDrPlanConfig, + BackupDrPlanRule, + BackupDrTemplateConfig, + BackupLocation, + ListResourceBackupConfigsRequest, + ListResourceBackupConfigsResponse, + PitrSettings, + ResourceBackupConfig, +) if hasattr(api_core, "check_python_version") and hasattr( api_core, "check_dependency_versions" @@ 
-262,6 +277,7 @@ def _get_version(dependency_name): __all__ = ( "BackupDRAsyncClient", + "BackupDrProtectionSummaryAsyncClient", "AcceleratorConfig", "AccessConfig", "AdvancedMachineFeatures", @@ -272,10 +288,16 @@ def _get_version(dependency_name): "BackupApplianceBackupConfig", "BackupApplianceBackupProperties", "BackupApplianceLockInfo", + "BackupConfigDetails", "BackupConfigInfo", "BackupConfigState", "BackupDRClient", + "BackupDrPlanConfig", + "BackupDrPlanRule", + "BackupDrProtectionSummaryClient", + "BackupDrTemplateConfig", "BackupGcpResource", + "BackupLocation", "BackupLock", "BackupPlan", "BackupPlanAssociation", @@ -356,6 +378,8 @@ def _get_version(dependency_name): "ListDataSourcesResponse", "ListManagementServersRequest", "ListManagementServersResponse", + "ListResourceBackupConfigsRequest", + "ListResourceBackupConfigsResponse", "ManagementServer", "ManagementURI", "Metadata", @@ -363,7 +387,9 @@ def _get_version(dependency_name): "NetworkInterface", "NetworkPerformanceConfig", "OperationMetadata", + "PitrSettings", "RegionDiskTargetEnvironment", + "ResourceBackupConfig", "RestoreBackupRequest", "RestoreBackupResponse", "RuleConfigInfo", diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 7e740c7d1390..83a163f22b26 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -578,6 +578,40 @@ } } } + }, + "BackupDrProtectionSummary": { + "clients": { + "grpc": { + "libraryClient": "BackupDrProtectionSummaryClient", + "rpcs": { + "ListResourceBackupConfigs": { + "methods": [ + "list_resource_backup_configs" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BackupDrProtectionSummaryAsyncClient", + "rpcs": { + "ListResourceBackupConfigs": { + "methods": [ + "list_resource_backup_configs" + ] + } + } + }, + "rest": { + "libraryClient": "BackupDrProtectionSummaryClient", + "rpcs": { + "ListResourceBackupConfigs": { + "methods": [ + "list_resource_backup_configs" + ] + } + } + } + } } } } diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/__init__.py new file mode 100644 index 000000000000..87826b6ddc50 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import BackupDrProtectionSummaryAsyncClient +from .client import BackupDrProtectionSummaryClient + +__all__ = ( + "BackupDrProtectionSummaryClient", + "BackupDrProtectionSummaryAsyncClient", +) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/async_client.py new file mode 100644 index 000000000000..1725d3b6b4b4 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/async_client.py @@ -0,0 +1,1093 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.backupdr_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.backupdr_v1.services.backup_dr_protection_summary import pagers +from google.cloud.backupdr_v1.types import protection_summary + +from .client import BackupDrProtectionSummaryClient +from .transports.base import DEFAULT_CLIENT_INFO, BackupDrProtectionSummaryTransport +from .transports.grpc_asyncio import BackupDrProtectionSummaryGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class BackupDrProtectionSummaryAsyncClient: + """The Protection Summary service.""" + + _client: BackupDrProtectionSummaryClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = BackupDrProtectionSummaryClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BackupDrProtectionSummaryClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + BackupDrProtectionSummaryClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = BackupDrProtectionSummaryClient._DEFAULT_UNIVERSE + + resource_backup_config_path = staticmethod( + BackupDrProtectionSummaryClient.resource_backup_config_path + ) + parse_resource_backup_config_path = staticmethod( + BackupDrProtectionSummaryClient.parse_resource_backup_config_path + ) + common_billing_account_path = staticmethod( + BackupDrProtectionSummaryClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BackupDrProtectionSummaryClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + BackupDrProtectionSummaryClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + BackupDrProtectionSummaryClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + BackupDrProtectionSummaryClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + BackupDrProtectionSummaryClient.parse_common_organization_path + ) + common_project_path = staticmethod( + BackupDrProtectionSummaryClient.common_project_path + ) + parse_common_project_path = staticmethod( + BackupDrProtectionSummaryClient.parse_common_project_path + ) + common_location_path = staticmethod( + BackupDrProtectionSummaryClient.common_location_path + ) + parse_common_location_path = staticmethod( + BackupDrProtectionSummaryClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BackupDrProtectionSummaryAsyncClient: The constructed client. + """ + return BackupDrProtectionSummaryClient.from_service_account_info.__func__(BackupDrProtectionSummaryAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BackupDrProtectionSummaryAsyncClient: The constructed client. + """ + return BackupDrProtectionSummaryClient.from_service_account_file.__func__(BackupDrProtectionSummaryAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return BackupDrProtectionSummaryClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> BackupDrProtectionSummaryTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            BackupDrProtectionSummaryTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = BackupDrProtectionSummaryClient.get_transport_class
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[
+            Union[
+                str,
+                BackupDrProtectionSummaryTransport,
+                Callable[..., BackupDrProtectionSummaryTransport],
+            ]
+        ] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the backup dr protection summary async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,BackupDrProtectionSummaryTransport,Callable[..., BackupDrProtectionSummaryTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the BackupDrProtectionSummaryTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided.
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = BackupDrProtectionSummaryClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        ):  # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient`.",
+                extra={
+                    "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary",
+                    "universeDomain": getattr(
+                        self._client._transport._credentials, "universe_domain", ""
+                    ),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(
+                        self.transport._credentials, "get_cred_info", lambda: None
+                    )(),
+                }
+                if hasattr(self._client._transport, "_credentials")
+                else {
+                    "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary",
+                    "credentialsType": None,
+                },
+            )
+
+    async def list_resource_backup_configs(
+        self,
+        request: Optional[
+            Union[protection_summary.ListResourceBackupConfigsRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pagers.ListResourceBackupConfigsAsyncPager:
+        r"""Lists ResourceBackupConfigs.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import backupdr_v1
+
+            async def sample_list_resource_backup_configs():
+                # Create a client
+                client = backupdr_v1.BackupDrProtectionSummaryAsyncClient()
+
+                # Initialize request argument(s)
+                request = backupdr_v1.ListResourceBackupConfigsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_resource_backup_configs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.backupdr_v1.types.ListResourceBackupConfigsRequest, dict]]):
+                The request object. Request for
+                ListResourceBackupConfigs.
+            parent (:class:`str`):
+                Required. The project and location for which to retrieve
+                resource backup configs. Format:
+                'projects/{project_id}/locations/{location}'. In Google
+                Cloud Backup and DR, locations map to Google Cloud
+                regions, for example **us-central1**.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.backupdr_v1.services.backup_dr_protection_summary.pagers.ListResourceBackupConfigsAsyncPager:
+                Response for
+                ListResourceBackupConfigs.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, protection_summary.ListResourceBackupConfigsRequest):
+            request = protection_summary.ListResourceBackupConfigsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.list_resource_backup_configs
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListResourceBackupConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users).
A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a function.
+
+        If the function does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for ``TestIamPermissions`` method.
+        """
+        # Create or coerce a protobuf request object.
+ + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BackupDrProtectionSummaryAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("BackupDrProtectionSummaryAsyncClient",) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/client.py new file mode 100644 index 000000000000..0181cd69131e --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/client.py @@ -0,0 +1,1558 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.backupdr_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.backupdr_v1.services.backup_dr_protection_summary import pagers +from google.cloud.backupdr_v1.types import protection_summary + +from .transports.base import DEFAULT_CLIENT_INFO, BackupDrProtectionSummaryTransport +from .transports.grpc import BackupDrProtectionSummaryGrpcTransport +from .transports.grpc_asyncio import BackupDrProtectionSummaryGrpcAsyncIOTransport +from .transports.rest import BackupDrProtectionSummaryRestTransport + + +class BackupDrProtectionSummaryClientMeta(type): + """Metaclass for the BackupDrProtectionSummary client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[BackupDrProtectionSummaryTransport]] + _transport_registry["grpc"] = BackupDrProtectionSummaryGrpcTransport + _transport_registry["grpc_asyncio"] = BackupDrProtectionSummaryGrpcAsyncIOTransport + _transport_registry["rest"] = BackupDrProtectionSummaryRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BackupDrProtectionSummaryTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BackupDrProtectionSummaryClient(metaclass=BackupDrProtectionSummaryClientMeta): + """The Protection Summary service.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "backupdr.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "backupdr.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @staticmethod
+    def _use_client_cert_effective():
+        """Returns whether client certificate should be used for mTLS if the
+        google-auth version supports should_use_client_cert automatic mTLS enablement.
+
+        Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var.
+
+        Returns:
+            bool: whether client certificate should be used for mTLS
+        Raises:
+            ValueError: (If using a version of google-auth without should_use_client_cert and
+                GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.)
+        """
+        # check if google-auth version supports should_use_client_cert for automatic mTLS enablement
+        if hasattr(mtls, "should_use_client_cert"):  # pragma: NO COVER
+            return mtls.should_use_client_cert()
+        else:  # pragma: NO COVER
+            # if unsupported, fallback to reading from env var
+            use_client_cert_str = os.getenv(
+                "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+            ).lower()
+            if use_client_cert_str not in ("true", "false"):
+                raise ValueError(
+                    "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
+                    " either `true` or `false`"
+                )
+            return use_client_cert_str == "true"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            BackupDrProtectionSummaryClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            BackupDrProtectionSummaryClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> BackupDrProtectionSummaryTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            BackupDrProtectionSummaryTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def resource_backup_config_path(
+        project: str,
+        location: str,
+        resource_backup_config: str,
+    ) -> str:
+        """Returns a fully-qualified resource_backup_config string."""
+        return "projects/{project}/locations/{location}/resourceBackupConfigs/{resource_backup_config}".format(
+            project=project,
+            location=location,
+            resource_backup_config=resource_backup_config,
+        )
+
+    @staticmethod
+    def parse_resource_backup_config_path(path: str) -> Dict[str, str]:
+        """Parses a resource_backup_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/resourceBackupConfigs/(?P<resource_backup_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        else use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = BackupDrProtectionSummaryClient._use_client_cert_effective()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert:
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = BackupDrProtectionSummaryClient._use_client_cert_effective()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+        return use_client_cert, use_mtls_endpoint, universe_domain_env
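Taken together, these environment variables determine the endpoint a freshly constructed client resolves. A small illustration (not part of the generated file; it assumes application default credentials are available so the constructor can run, and `example.com` is a hypothetical universe domain):

    import os
    from google.cloud import backupdr_v1

    # Force the plain endpoint regardless of any client certificate.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    client = backupdr_v1.BackupDrProtectionSummaryClient()
    assert client.api_endpoint == "backupdr.googleapis.com"

    # Point the client at a hypothetical non-default universe; the endpoint
    # is then built from _DEFAULT_ENDPOINT_TEMPLATE.
    os.environ["GOOGLE_CLOUD_UNIVERSE_DOMAIN"] = "example.com"
    client = backupdr_v1.BackupDrProtectionSummaryClient()
    assert client.api_endpoint == "backupdr.example.com"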
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(
+        api_override, client_cert_source, universe_domain, use_mtls_endpoint
+    ):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+            universe_domain (str): The universe domain used by the client.
+            use_mtls_endpoint (str): How to use the mTLS endpoint, which also depends on
+                the other parameters. Possible values are "always", "auto", or "never".
+
+        Returns:
+            str: The API endpoint to be used by the client.
+        """
+        if api_override is not None:
+            api_endpoint = api_override
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            _default_universe = BackupDrProtectionSummaryClient._DEFAULT_UNIVERSE
+            if universe_domain != _default_universe:
+                raise MutualTLSChannelError(
+                    f"mTLS is not supported in any universe other than {_default_universe}."
+                )
+            api_endpoint = BackupDrProtectionSummaryClient.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = (
+                BackupDrProtectionSummaryClient._DEFAULT_ENDPOINT_TEMPLATE.format(
+                    UNIVERSE_DOMAIN=universe_domain
+                )
+            )
+        return api_endpoint
+
+    @staticmethod
+    def _get_universe_domain(
+        client_universe_domain: Optional[str], universe_domain_env: Optional[str]
+    ) -> str:
+        """Return the universe domain used by the client.
+
+        Args:
+            client_universe_domain (Optional[str]): The universe domain configured via the client options.
+            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+        Returns:
+            str: The universe domain to be used by the client.
+
+        Raises:
+            ValueError: If the universe domain is an empty string.
+        """
+        universe_domain = BackupDrProtectionSummaryClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+
+        # NOTE (b/349488459): universe validation is disabled until further notice.
+        return True
+
+    def _add_cred_info_for_auth_errors(
+        self, error: core_exceptions.GoogleAPICallError
+    ) -> None:
+        """Adds credential info string to error details for 401/403/404 errors.
+
+        Args:
+            error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BackupDrProtectionSummaryTransport, + Callable[..., BackupDrProtectionSummaryTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the backup dr protection summary client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BackupDrProtectionSummaryTransport,Callable[..., BackupDrProtectionSummaryTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BackupDrProtectionSummaryTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = BackupDrProtectionSummaryClient._read_environment_variables() + self._client_cert_source = ( + BackupDrProtectionSummaryClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = BackupDrProtectionSummaryClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BackupDrProtectionSummaryTransport) + if transport_provided: + # transport is a BackupDrProtectionSummaryTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(BackupDrProtectionSummaryTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or BackupDrProtectionSummaryClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[BackupDrProtectionSummaryTransport], + Callable[..., BackupDrProtectionSummaryTransport], + ] = ( + BackupDrProtectionSummaryClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BackupDrProtectionSummaryTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.backupdr_v1.BackupDrProtectionSummaryClient`.", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "credentialsType": None, + }, + ) + + def list_resource_backup_configs( + self, + request: Optional[ + Union[protection_summary.ListResourceBackupConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListResourceBackupConfigsPager: + r"""Lists ResourceBackupConfigs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_resource_backup_configs(): + # Create a client + client = backupdr_v1.BackupDrProtectionSummaryClient() + + # Initialize request argument(s) + request = backupdr_v1.ListResourceBackupConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_resource_backup_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListResourceBackupConfigsRequest, dict]): + The request object. Request for + ListResourceBackupConfigs. + parent (str): + Required. The project and location for which to retrieve + resource backup configs. Format: + 'projects/{project_id}/locations/{location}'. In Google + Cloud Backup and DR, locations map to Google Cloud + regions, for example **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr_protection_summary.pagers.ListResourceBackupConfigsPager: + Response for + ListResourceBackupConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, protection_summary.ListResourceBackupConfigsRequest): + request = protection_summary.ListResourceBackupConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_resource_backup_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListResourceBackupConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BackupDrProtectionSummaryClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        try:
+            # Send the request.
+            response = rpc(
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
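For orientation, here is a minimal, hypothetical call to the mixin above. It is a sketch, not part of the generated file: the resource name and member are placeholders, and it assumes the service exposes IAM on ResourceBackupConfig resources and that application default credentials are configured:

    from google.cloud import backupdr_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    client = backupdr_v1.BackupDrProtectionSummaryClient()

    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/viewer",               # placeholder role
                members=["user:eve@example.com"],  # placeholder member
            )
        ]
    )
    request = iam_policy_pb2.SetIamPolicyRequest(
        resource=(
            "projects/my-project/locations/us-central1"
            "/resourceBackupConfigs/my-config"
        ),
        policy=policy,
    )
    response = client.set_iam_policy(request=request)

In practice a set call is usually preceded by get_iam_policy (defined next) so the existing policy, including its etag, can be modified rather than overwritten.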
+    def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        try:
+            # Send the request.
+            response = rpc(
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
+    def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a function.
+
+        If the function does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+ Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("BackupDrProtectionSummaryClient",) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/pagers.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/pagers.py new file mode 100644 index 000000000000..ce119ada6e5e --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/pagers.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.backupdr_v1.types import protection_summary + + +class ListResourceBackupConfigsPager: + """A pager for iterating through ``list_resource_backup_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListResourceBackupConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``resource_backup_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListResourceBackupConfigs`` requests and continue to iterate + through the ``resource_backup_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListResourceBackupConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., protection_summary.ListResourceBackupConfigsResponse], + request: protection_summary.ListResourceBackupConfigsRequest, + response: protection_summary.ListResourceBackupConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListResourceBackupConfigsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListResourceBackupConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = protection_summary.ListResourceBackupConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[protection_summary.ListResourceBackupConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[protection_summary.ResourceBackupConfig]: + for page in self.pages: + yield from page.resource_backup_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListResourceBackupConfigsAsyncPager: + """A pager for iterating through ``list_resource_backup_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListResourceBackupConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resource_backup_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListResourceBackupConfigs`` requests and continue to iterate + through the ``resource_backup_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListResourceBackupConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[protection_summary.ListResourceBackupConfigsResponse] + ], + request: protection_summary.ListResourceBackupConfigsRequest, + response: protection_summary.ListResourceBackupConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListResourceBackupConfigsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListResourceBackupConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = protection_summary.ListResourceBackupConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[protection_summary.ListResourceBackupConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[protection_summary.ResourceBackupConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.resource_backup_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/README.rst b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/README.rst new file mode 100644 index 000000000000..7ae8c4a8e36c --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`BackupDrProtectionSummaryTransport` is the ABC for all transports. +- public child `BackupDrProtectionSummaryGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `BackupDrProtectionSummaryGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseBackupDrProtectionSummaryRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `BackupDrProtectionSummaryRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/__init__.py new file mode 100644 index 000000000000..103fc6a34295 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BackupDrProtectionSummaryTransport +from .grpc import BackupDrProtectionSummaryGrpcTransport +from .grpc_asyncio import BackupDrProtectionSummaryGrpcAsyncIOTransport +from .rest import ( + BackupDrProtectionSummaryRestInterceptor, + BackupDrProtectionSummaryRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[BackupDrProtectionSummaryTransport]] +_transport_registry["grpc"] = BackupDrProtectionSummaryGrpcTransport +_transport_registry["grpc_asyncio"] = BackupDrProtectionSummaryGrpcAsyncIOTransport +_transport_registry["rest"] = BackupDrProtectionSummaryRestTransport + +__all__ = ( + "BackupDrProtectionSummaryTransport", + "BackupDrProtectionSummaryGrpcTransport", + "BackupDrProtectionSummaryGrpcAsyncIOTransport", + "BackupDrProtectionSummaryRestTransport", + "BackupDrProtectionSummaryRestInterceptor", +) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/base.py new file mode 100644 index 000000000000..c85afabda277 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/base.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.backupdr_v1 import gapic_version as package_version +from google.cloud.backupdr_v1.types import protection_summary + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class BackupDrProtectionSummaryTransport(abc.ABC): + """Abstract transport class for BackupDrProtectionSummary.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "backupdr.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'backupdr.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_resource_backup_configs: gapic_v1.method.wrap_method( + self.list_resource_backup_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_resource_backup_configs( + self, + ) -> Callable[ + [protection_summary.ListResourceBackupConfigsRequest], + Union[ + protection_summary.ListResourceBackupConfigsResponse, + Awaitable[protection_summary.ListResourceBackupConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BackupDrProtectionSummaryTransport",) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/grpc.py new file mode 100644 index 000000000000..f51a998d9a33 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/grpc.py @@ -0,0 +1,553 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
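+#
+# The abstract base class annotates each RPC property as returning
+# Union[Response, Awaitable[Response]]; this synchronous transport
+# resolves those to plain responses, while the AsyncIO transport in a
+# sibling module resolves them to awaitables.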
+#
+import json
+import logging as std_logging
+import pickle
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.backupdr_v1.types import protection_summary
+
+from .base import DEFAULT_CLIENT_INFO, BackupDrProtectionSummaryTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of stringified values.
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class BackupDrProtectionSummaryGrpcTransport(BackupDrProtectionSummaryTransport):
+    """gRPC backend transport for BackupDrProtectionSummary.
+
+    The Protection Summary service.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "backupdr.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'backupdr.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "backupdr.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def list_resource_backup_configs(
+        self,
+    ) -> Callable[
+        [protection_summary.ListResourceBackupConfigsRequest],
+        protection_summary.ListResourceBackupConfigsResponse,
+    ]:
+        r"""Return a callable for the list resource backup configs method over gRPC.
+
+        Lists ResourceBackupConfigs.
+
+        Returns:
+            Callable[[~.ListResourceBackupConfigsRequest],
+                    ~.ListResourceBackupConfigsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_resource_backup_configs" not in self._stubs:
+            self._stubs[
+                "list_resource_backup_configs"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.backupdr.v1.BackupDrProtectionSummary/ListResourceBackupConfigs",
+                request_serializer=protection_summary.ListResourceBackupConfigsRequest.serialize,
+                response_deserializer=protection_summary.ListResourceBackupConfigsResponse.deserialize,
+            )
+        return self._stubs["list_resource_backup_configs"]
+
+    def close(self):
+        self._logged_channel.close()
+
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
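+        # The stub is created at most once per transport and cached in
+        # self._stubs, so repeated property access reuses the same bound RPC.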
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        function. Replaces any existing policy.
+ Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
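+        # Note: like get_location/list_locations above, this targets the
+        # generic google.iam.v1.IAMPolicy mixin service exposed alongside
+        # the BackupDR API rather than a BackupDR-specific RPC.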
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BackupDrProtectionSummaryGrpcTransport",) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/grpc_asyncio.py new file mode 100644 index 000000000000..1895493143cb --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/grpc_asyncio.py @@ -0,0 +1,630 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.backupdr_v1.types import protection_summary + +from .base import DEFAULT_CLIENT_INFO, BackupDrProtectionSummaryTransport +from .grpc import BackupDrProtectionSummaryGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = 
f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class BackupDrProtectionSummaryGrpcAsyncIOTransport(BackupDrProtectionSummaryTransport): + """gRPC AsyncIO backend transport for BackupDrProtectionSummary. + + The Protection Summary service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "backupdr.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "backupdr.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'backupdr.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests.
If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        self._interceptor = _LoggingClientAIOInterceptor()
+        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+        self._logged_channel = self._grpc_channel
+        self._wrap_with_kind = (
+            "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        )
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
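+        # (No separate wrapper exists here: the AsyncIO logging interceptor
+        # was appended to this channel's interceptor list in __init__, so
+        # self._logged_channel is this same channel object.)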
+ return self._grpc_channel + + @property + def list_resource_backup_configs( + self, + ) -> Callable[ + [protection_summary.ListResourceBackupConfigsRequest], + Awaitable[protection_summary.ListResourceBackupConfigsResponse], + ]: + r"""Return a callable for the list resource backup configs method over gRPC. + + Lists ResourceBackupConfigs. + + Returns: + Callable[[~.ListResourceBackupConfigsRequest], + Awaitable[~.ListResourceBackupConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_resource_backup_configs" not in self._stubs: + self._stubs[ + "list_resource_backup_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDrProtectionSummary/ListResourceBackupConfigs", + request_serializer=protection_summary.ListResourceBackupConfigsRequest.serialize, + response_deserializer=protection_summary.ListResourceBackupConfigsResponse.deserialize, + ) + return self._stubs["list_resource_backup_configs"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_resource_backup_configs: self._wrap_method( + self.list_resource_backup_configs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
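+        # (These well-known request/response protos use the raw protobuf
+        # SerializeToString/FromString pair, whereas the service's own
+        # proto-plus messages use serialize/deserialize helpers.)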
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("BackupDrProtectionSummaryGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/rest.py new file mode 100644 index 000000000000..0230d3ef62c5 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/rest.py @@ -0,0 +1,1882 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.backupdr_v1.types import protection_summary + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseBackupDrProtectionSummaryRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class BackupDrProtectionSummaryRestInterceptor: + """Interceptor for BackupDrProtectionSummary. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+    Example use cases include:
+
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the BackupDrProtectionSummaryRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomBackupDrProtectionSummaryInterceptor(BackupDrProtectionSummaryRestInterceptor):
+            def pre_list_resource_backup_configs(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            def post_list_resource_backup_configs(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+        transport = BackupDrProtectionSummaryRestTransport(interceptor=MyCustomBackupDrProtectionSummaryInterceptor())
+        client = BackupDrProtectionSummaryClient(transport=transport)
+
+    """
+
+    def pre_list_resource_backup_configs(
+        self,
+        request: protection_summary.ListResourceBackupConfigsRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[
+        protection_summary.ListResourceBackupConfigsRequest,
+        Sequence[Tuple[str, Union[str, bytes]]],
+    ]:
+        """Pre-rpc interceptor for list_resource_backup_configs
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the BackupDrProtectionSummary server.
+        """
+        return request, metadata
+
+    def post_list_resource_backup_configs(
+        self, response: protection_summary.ListResourceBackupConfigsResponse
+    ) -> protection_summary.ListResourceBackupConfigsResponse:
+        """Post-rpc interceptor for list_resource_backup_configs
+
+        DEPRECATED. Please use the `post_list_resource_backup_configs_with_metadata`
+        interceptor instead.
+
+        Override in a subclass to read or manipulate the response
+        after it is returned by the BackupDrProtectionSummary server but before
+        it is returned to user code. This `post_list_resource_backup_configs` interceptor runs
+        before the `post_list_resource_backup_configs_with_metadata` interceptor.
+        """
+        return response
+
+    def post_list_resource_backup_configs_with_metadata(
+        self,
+        response: protection_summary.ListResourceBackupConfigsResponse,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[
+        protection_summary.ListResourceBackupConfigsResponse,
+        Sequence[Tuple[str, Union[str, bytes]]],
+    ]:
+        """Post-rpc interceptor for list_resource_backup_configs
+
+        Override in a subclass to read or manipulate the response or metadata after it
+        is returned by the BackupDrProtectionSummary server but before it is returned to user code.
+
+        We recommend only using this `post_list_resource_backup_configs_with_metadata`
+        interceptor in new development instead of the `post_list_resource_backup_configs` interceptor.
+        When both interceptors are used, this `post_list_resource_backup_configs_with_metadata` interceptor runs after the
+        `post_list_resource_backup_configs` interceptor. The (possibly modified) response returned by
+        `post_list_resource_backup_configs` will be passed to
+        `post_list_resource_backup_configs_with_metadata`.
+        """
+        return response, metadata
+
+    def pre_get_location(
+        self,
+        request: locations_pb2.GetLocationRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[
+        locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]
+    ]:
+        """Pre-rpc interceptor for get_location
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the BackupDrProtectionSummary server.
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDrProtectionSummary server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BackupDrProtectionSummary server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BackupDrProtectionSummaryRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BackupDrProtectionSummaryRestInterceptor + + +class BackupDrProtectionSummaryRestTransport( + _BaseBackupDrProtectionSummaryRestTransport +): + """REST backend synchronous transport for BackupDrProtectionSummary. + + The Protection Summary service. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "backupdr.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[BackupDrProtectionSummaryRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'backupdr.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BackupDrProtectionSummaryRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _ListResourceBackupConfigs( + _BaseBackupDrProtectionSummaryRestTransport._BaseListResourceBackupConfigs, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash( + "BackupDrProtectionSummaryRestTransport.ListResourceBackupConfigs" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: protection_summary.ListResourceBackupConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> protection_summary.ListResourceBackupConfigsResponse: + r"""Call the list resource backup + configs method over HTTP. + + Args: + request (~.protection_summary.ListResourceBackupConfigsRequest): + The request object. Request for + ListResourceBackupConfigs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.protection_summary.ListResourceBackupConfigsResponse: + Response for + ListResourceBackupConfigs. 
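+
+            This transport-level call returns a single page. A hedged sketch of
+            manual pagination (the ``transport`` variable and the project and
+            location values are assumptions):
+
+            .. code-block:: python
+
+                request = protection_summary.ListResourceBackupConfigsRequest(
+                    parent="projects/my-project/locations/us-central1",
+                )
+                while True:
+                    page = transport.list_resource_backup_configs(request)
+                    for config in page.resource_backup_configs:
+                        ...  # inspect each ResourceBackupConfig
+                    if not page.next_page_token:
+                        break
+                    request.page_token = page.next_page_token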
+
+            """
+
+            http_options = (
+                _BaseBackupDrProtectionSummaryRestTransport._BaseListResourceBackupConfigs._get_http_options()
+            )
+
+            request, metadata = self._interceptor.pre_list_resource_backup_configs(
+                request, metadata
+            )
+            transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseListResourceBackupConfigs._get_transcoded_request(
+                http_options, request
+            )
+
+            # Jsonify the query params
+            query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseListResourceBackupConfigs._get_query_params_json(
+                transcoded_request
+            )
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(
+                    host=self._host, uri=transcoded_request["uri"]
+                )
+                method = transcoded_request["method"]
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.ListResourceBackupConfigs",
+                    extra={
+                        "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary",
+                        "rpcName": "ListResourceBackupConfigs",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = BackupDrProtectionSummaryRestTransport._ListResourceBackupConfigs._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = protection_summary.ListResourceBackupConfigsResponse()
+            pb_resp = protection_summary.ListResourceBackupConfigsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_resource_backup_configs(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_resource_backup_configs_with_metadata(
+                resp, response_metadata
+            )
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                try:
+                    # Serialize the parsed proto-plus message, not the raw HTTP response.
+                    response_payload = (
+                        protection_summary.ListResourceBackupConfigsResponse.to_json(resp)
+                    )
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.list_resource_backup_configs",
+                    extra={
+                        "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary",
+                        "rpcName": "ListResourceBackupConfigs",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    @property
+    def list_resource_backup_configs(
+        self,
+    ) -> Callable[
+        [protection_summary.ListResourceBackupConfigsRequest],
+        protection_summary.ListResourceBackupConfigsResponse,
+    ]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+ # In C++ this would require a dynamic_cast + return self._ListResourceBackupConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseBackupDrProtectionSummaryRestTransport._BaseGetLocation, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.GetLocation", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
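+            # For example, an HTTP 404 from the service surfaces as
+            # google.api_core.exceptions.NotFound.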
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseBackupDrProtectionSummaryRestTransport._BaseListLocations, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
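+
+            A brief sketch of reading the result (the ``transport`` variable and
+            project name are assumptions):
+
+            .. code-block:: python
+
+                request = locations_pb2.ListLocationsRequest(name="projects/my-project")
+                for location in transport.list_locations(request).locations:
+                    print(location.location_id)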
+ """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.ListLocations", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseBackupDrProtectionSummaryRestTransport._BaseGetIamPolicy, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
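+            # For example, an HTTP 403 surfaces as
+            # google.api_core.exceptions.PermissionDenied.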
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseBackupDrProtectionSummaryRestTransport._BaseSetIamPolicy, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
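+
+            A hedged sketch of the usual read-modify-write flow (the ``transport``
+            variable, resource name, and member are assumptions). Keeping the etag
+            returned by GetIamPolicy in the policy lets the server reject
+            conflicting concurrent edits:
+
+            .. code-block:: python
+
+                resource = "projects/p/locations/l/managementServers/s"
+                policy = transport.get_iam_policy(
+                    iam_policy_pb2.GetIamPolicyRequest(resource=resource)
+                )
+                policy.bindings.add(role="roles/viewer", members=["user:alice@example.com"])
+                transport.set_iam_policy(
+                    iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
+                )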
+ """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseBackupDrProtectionSummaryRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseBackupDrProtectionSummaryRestTransport._BaseTestIamPermissions, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseBackupDrProtectionSummaryRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDrProtectionSummaryRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
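+            # For example, an HTTP 400 surfaces as
+            # google.api_core.exceptions.InvalidArgument.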
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseBackupDrProtectionSummaryRestTransport._BaseCancelOperation, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
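+
+            The call returns ``None`` on success; under the long-running
+            operations contract, cancellation is best-effort. A one-line sketch
+            (the ``transport`` variable and operation name are assumptions):
+
+            .. code-block:: python
+
+                transport.cancel_operation(
+                    operations_pb2.CancelOperationRequest(
+                        name="projects/p/locations/l/operations/op-123"
+                    )
+                )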
+ """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseBackupDrProtectionSummaryRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.CancelOperation", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseBackupDrProtectionSummaryRestTransport._BaseDeleteOperation, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseBackupDrProtectionSummaryRestTransport._BaseGetOperation, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.GetOperation", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
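+            # The full status-code-to-exception mapping lives in
+            # google.api_core.exceptions.from_http_response.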
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseBackupDrProtectionSummaryRestTransport._BaseListOperations, + BackupDrProtectionSummaryRestStub, + ): + def __hash__(self): + return hash("BackupDrProtectionSummaryRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
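+
+            A short sketch of scanning operations under a location (the
+            ``transport`` variable and resource names are assumptions):
+
+            .. code-block:: python
+
+                request = operations_pb2.ListOperationsRequest(
+                    name="projects/my-project/locations/us-central1"
+                )
+                for op in transport.list_operations(request).operations:
+                    print(op.name, op.done)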
+ """ + + http_options = ( + _BaseBackupDrProtectionSummaryRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseBackupDrProtectionSummaryRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDrProtectionSummaryRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.ListOperations", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BackupDrProtectionSummaryRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BackupDrProtectionSummaryRestTransport",) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/rest_base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/rest_base.py new file mode 100644 index 000000000000..9652ff7ba879 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr_protection_summary/transports/rest_base.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.backupdr_v1.types import protection_summary + +from .base import DEFAULT_CLIENT_INFO, BackupDrProtectionSummaryTransport + + +class _BaseBackupDrProtectionSummaryRestTransport(BackupDrProtectionSummaryTransport): + """Base REST backend transport for BackupDrProtectionSummary. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "backupdr.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'backupdr.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
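+
+        For example (a hypothetical local-testing setup), a concrete subclass
+        such as BackupDrProtectionSummaryRestTransport can be pointed at a
+        plain-HTTP emulator:
+
+        .. code-block:: python
+
+            from google.auth.credentials import AnonymousCredentials
+
+            transport = BackupDrProtectionSummaryRestTransport(
+                host="localhost:8080",  # assumed emulator address
+                url_scheme="http",
+                credentials=AnonymousCredentials(),
+            )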
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseListResourceBackupConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/resourceBackupConfigs", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = protection_summary.ListResourceBackupConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDrProtectionSummaryRestTransport._BaseListResourceBackupConfigs._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/managementServers/*}:getIamPolicy", + }, + ] + 
return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/managementServers/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/managementServers/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = 
json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseBackupDrProtectionSummaryRestTransport",) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index 19a484a7e061..93036c68775b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -149,6 +149,17 @@ ListDataSourceReferencesRequest, ListDataSourceReferencesResponse, ) +from .protection_summary import ( + BackupConfigDetails, + BackupDrPlanConfig, + BackupDrPlanRule, + BackupDrTemplateConfig, + BackupLocation, + ListResourceBackupConfigsRequest, + ListResourceBackupConfigsResponse, + PitrSettings, + ResourceBackupConfig, +) __all__ = ( "CreateManagementServerRequest", @@ -271,4 +282,13 @@ "GetDataSourceReferenceRequest", "ListDataSourceReferencesRequest", "ListDataSourceReferencesResponse", + "BackupConfigDetails", + "BackupDrPlanConfig", + "BackupDrPlanRule", + "BackupDrTemplateConfig", + "BackupLocation", + "ListResourceBackupConfigsRequest", + "ListResourceBackupConfigsResponse", + "PitrSettings", + "ResourceBackupConfig", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/protection_summary.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/protection_summary.py new file mode 100644 index 000000000000..a2d6b637d386 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/protection_summary.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "ListResourceBackupConfigsRequest", + "ListResourceBackupConfigsResponse", + "ResourceBackupConfig", + "BackupConfigDetails", + "PitrSettings", + "BackupDrTemplateConfig", + "BackupDrPlanConfig", + "BackupDrPlanRule", + "BackupLocation", + }, +) + + +class ListResourceBackupConfigsRequest(proto.Message): + r"""Request for ListResourceBackupConfigs. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + resource backup configs. Format: + 'projects/{project_id}/locations/{location}'. In Google + Cloud Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will use 100 as default. + Maximum value is 500 and values above 500 will + be coerced to 500. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListResourceBackupConfigsResponse(proto.Message): + r"""Response for ListResourceBackupConfigs. + + Attributes: + resource_backup_configs (MutableSequence[google.cloud.backupdr_v1.types.ResourceBackupConfig]): + The list of ResourceBackupConfigs for the + specified scope. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + resource_backup_configs: MutableSequence[ + "ResourceBackupConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ResourceBackupConfig", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ResourceBackupConfig(proto.Message): + r"""ResourceBackupConfig represents a resource along with its + backup configurations. + + Attributes: + name (str): + Identifier. The resource name of the + ResourceBackupConfig. Format: + + projects/{project}/locations/{location}/resourceBackupConfigs/{uid} + uid (str): + Output only. The unique identifier of the + resource backup config. + target_resource (str): + Output only. The `full resource + name `__ + of the cloud resource that this configuration applies to. + Supported resource types are + [ResourceBackupConfig.ResourceType][google.cloud.backupdr.v1.ResourceBackupConfig.ResourceType]. + target_resource_display_name (str): + Output only. The human friendly name of the + target resource. 
+        target_resource_type (google.cloud.backupdr_v1.types.ResourceBackupConfig.ResourceType):
+            Output only. The type of the target resource.
+        target_resource_labels (MutableMapping[str, str]):
+            Labels associated with the target resource.
+        backup_configs_details (MutableSequence[google.cloud.backupdr_v1.types.BackupConfigDetails]):
+            Backup configurations applying to the target
+            resource, including those targeting its
+            related/child resources. For example, backup
+            configuration applicable to Compute Engine disks
+            will be populated in this field for a Compute
+            Engine VM which has the disk associated.
+        backup_configured (bool):
+            Output only. Whether the target resource is configured for
+            backup. This is true if the backup_configs_details is not
+            empty.
+        vaulted (bool):
+            Output only. Whether the target resource is protected by a
+            backup vault. This is true if the backup_configs_details is
+            not empty and any of the
+            [ResourceBackupConfig.backup_configs_details][google.cloud.backupdr.v1.ResourceBackupConfig.backup_configs_details]
+            has a backup configuration with
+            [BackupConfigDetails.backup_vault][google.cloud.backupdr.v1.BackupConfigDetails.backup_vault]
+            set.
+    """
+
+    class ResourceType(proto.Enum):
+        r"""The type of the cloud resource.
+
+        Values:
+            RESOURCE_TYPE_UNSPECIFIED (0):
+                Resource type not set.
+            CLOUD_SQL_INSTANCE (1):
+                Cloud SQL instance.
+            COMPUTE_ENGINE_VM (2):
+                Compute Engine VM.
+            COMPUTE_ENGINE_DISK (3):
+                Compute Engine Disk.
+            COMPUTE_ENGINE_REGIONAL_DISK (4):
+                Compute Engine Regional Disk.
+        """
+        RESOURCE_TYPE_UNSPECIFIED = 0
+        CLOUD_SQL_INSTANCE = 1
+        COMPUTE_ENGINE_VM = 2
+        COMPUTE_ENGINE_DISK = 3
+        COMPUTE_ENGINE_REGIONAL_DISK = 4
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    uid: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    target_resource: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    target_resource_display_name: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    target_resource_type: ResourceType = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=ResourceType,
+    )
+    target_resource_labels: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=6,
+    )
+    backup_configs_details: MutableSequence[
+        "BackupConfigDetails"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=7,
+        message="BackupConfigDetails",
+    )
+    backup_configured: bool = proto.Field(
+        proto.BOOL,
+        number=8,
+    )
+    vaulted: bool = proto.Field(
+        proto.BOOL,
+        number=9,
+    )
+
+
+class BackupConfigDetails(proto.Message):
+    r"""BackupConfigDetails has information about how the resource is
+    configured for backups and about the most recent backup taken
+    for this configuration.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        backup_config_source (str):
+            Output only. The full resource name of the
+            backup config source resource. For example,
+            "//backupdr.googleapis.com/v1/projects/{project}/locations/{region}/backupPlans/{backupplanId}"
+            or
+            "//compute.googleapis.com/projects/{project}/locations/{region}/resourcePolicies/{resourcePolicyId}".
+        backup_config_source_display_name (str):
+            Output only. The display name of the backup
+            config source resource.
+ type_ (google.cloud.backupdr_v1.types.BackupConfigDetails.Type): + Output only. The type of the backup config + resource. + state (google.cloud.backupdr_v1.types.BackupConfigDetails.State): + Output only. The state of the backup config + resource. + pitr_settings (google.cloud.backupdr_v1.types.PitrSettings): + Output only. Point in time recovery settings + of the backup configuration resource. + latest_successful_backup_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the latest + successful backup created via this backup + configuration. + applicable_resource (str): + Output only. The `full resource + name `__ + of the resource that is applicable for the backup + configuration. Example: + "//compute.googleapis.com/projects/{project}/zones/{zone}/instances/{instance}". + backup_vault (str): + Output only. The `full resource + name `__ + of the backup vault that will store the backups generated + through this backup configuration. Example: + "//backupdr.googleapis.com/v1/projects/{project}/locations/{region}/backupVaults/{backupvaultId}". + backup_locations (MutableSequence[google.cloud.backupdr_v1.types.BackupLocation]): + The locations where the backups are to be + stored. + backup_dr_plan_config (google.cloud.backupdr_v1.types.BackupDrPlanConfig): + Google Cloud Backup and DR's Backup Plan + specific data. + + This field is a member of `oneof`_ ``plan_specific_config``. + backup_dr_template_config (google.cloud.backupdr_v1.types.BackupDrTemplateConfig): + Google Cloud Backup and DR's Template + specific data. + + This field is a member of `oneof`_ ``plan_specific_config``. + """ + + class Type(proto.Enum): + r"""Type of the backup configuration. + This enum may receive new values in the future. + + Values: + TYPE_UNSPECIFIED (0): + Backup config type is unspecified. + CLOUD_SQL_INSTANCE_BACKUP_CONFIG (1): + Backup config is Cloud SQL instance's + automated backup config. + COMPUTE_ENGINE_RESOURCE_POLICY (2): + Backup config is Compute Engine Resource + Policy. + BACKUPDR_BACKUP_PLAN (3): + Backup config is Google Cloud Backup and DR's + Backup Plan. + BACKUPDR_TEMPLATE (4): + Backup config is Google Cloud Backup and DR's + Template. + """ + TYPE_UNSPECIFIED = 0 + CLOUD_SQL_INSTANCE_BACKUP_CONFIG = 1 + COMPUTE_ENGINE_RESOURCE_POLICY = 2 + BACKUPDR_BACKUP_PLAN = 3 + BACKUPDR_TEMPLATE = 4 + + class State(proto.Enum): + r"""The state tells whether the backup config is active or not. + + Values: + STATE_UNSPECIFIED (0): + Backup config state not set. + ACTIVE (1): + The config is in an active state protecting + the resource + INACTIVE (2): + The config is currently not protecting the + resource. Either because it is disabled or the + owning project has been deleted without cleanup + of the actual resource. + ERROR (3): + The config still exists but because of some + error state it is not protecting the resource. + Like the source project is deleted. For eg. + PlanAssociation, BackupPlan is deleted. 
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + ERROR = 3 + + backup_config_source: str = proto.Field( + proto.STRING, + number=1, + ) + backup_config_source_display_name: str = proto.Field( + proto.STRING, + number=2, + ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + pitr_settings: "PitrSettings" = proto.Field( + proto.MESSAGE, + number=5, + message="PitrSettings", + ) + latest_successful_backup_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + applicable_resource: str = proto.Field( + proto.STRING, + number=7, + ) + backup_vault: str = proto.Field( + proto.STRING, + number=8, + ) + backup_locations: MutableSequence["BackupLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="BackupLocation", + ) + backup_dr_plan_config: "BackupDrPlanConfig" = proto.Field( + proto.MESSAGE, + number=10, + oneof="plan_specific_config", + message="BackupDrPlanConfig", + ) + backup_dr_template_config: "BackupDrTemplateConfig" = proto.Field( + proto.MESSAGE, + number=11, + oneof="plan_specific_config", + message="BackupDrTemplateConfig", + ) + + +class PitrSettings(proto.Message): + r"""Point in time recovery settings of the backup configuration + resource. + + Attributes: + retention_days (int): + Output only. Number of days to retain the + backup. + """ + + retention_days: int = proto.Field( + proto.INT32, + number=1, + ) + + +class BackupDrTemplateConfig(proto.Message): + r"""Provides additional information about Google Cloud Backup + and DR's Template backup configuration. + + Attributes: + first_party_management_uri (str): + Output only. The URI of the BackupDr template + resource for the first party identity users. + third_party_management_uri (str): + Output only. The URI of the BackupDr template + resource for the third party identity users. + """ + + first_party_management_uri: str = proto.Field( + proto.STRING, + number=1, + ) + third_party_management_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + +class BackupDrPlanConfig(proto.Message): + r"""BackupDrPlanConfig has additional information about Google + Cloud Backup and DR's Plan backup configuration. + + Attributes: + backup_dr_plan_rules (MutableSequence[google.cloud.backupdr_v1.types.BackupDrPlanRule]): + Backup rules of the backup plan resource. + """ + + backup_dr_plan_rules: MutableSequence["BackupDrPlanRule"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupDrPlanRule", + ) + + +class BackupDrPlanRule(proto.Message): + r"""BackupDrPlanRule has rule specific information of the backup + plan resource. + + Attributes: + rule_id (str): + Output only. Unique Id of the backup rule. + last_successful_backup_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the latest + successful backup created via this backup rule. + """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + last_successful_backup_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BackupLocation(proto.Message): + r"""BackupLocation represents a cloud location where a backup can + be stored. + + Attributes: + type_ (google.cloud.backupdr_v1.types.BackupLocation.Type): + Output only. The type of the location. + location_id (str): + Output only. The id of the cloud location. + Example: "us-central1". 
+ """ + + class Type(proto.Enum): + r"""The type of the location. + + Values: + TYPE_UNSPECIFIED (0): + Location type is unspecified. + ZONAL (1): + Location type is zonal. + REGIONAL (2): + Location type is regional. + MULTI_REGIONAL (3): + Location type is multi regional. + """ + TYPE_UNSPECIFIED = 0 + ZONAL = 1 + REGIONAL = 2 + MULTI_REGIONAL = 3 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + location_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_async.py new file mode 100644 index 000000000000..ff68e857beaf --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListResourceBackupConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDrProtectionSummary_ListResourceBackupConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_resource_backup_configs(): + # Create a client + client = backupdr_v1.BackupDrProtectionSummaryAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListResourceBackupConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_resource_backup_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END backupdr_v1_generated_BackupDrProtectionSummary_ListResourceBackupConfigs_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_sync.py new file mode 100644 index 000000000000..132d6c1c6971 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListResourceBackupConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDrProtectionSummary_ListResourceBackupConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_resource_backup_configs(): + # Create a client + client = backupdr_v1.BackupDrProtectionSummaryClient() + + # Initialize request argument(s) + request = backupdr_v1.ListResourceBackupConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_resource_backup_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END backupdr_v1_generated_BackupDrProtectionSummary_ListResourceBackupConfigs_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index c203a867f2f2..c2415c1a2596 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -6095,6 +6095,167 @@ } ], "title": "backupdr_v1_generated_backup_dr_update_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient", + "shortName": "BackupDrProtectionSummaryAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDrProtectionSummaryAsyncClient.list_resource_backup_configs", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDrProtectionSummary.ListResourceBackupConfigs", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "shortName": "BackupDrProtectionSummary" + }, + "shortName": "ListResourceBackupConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListResourceBackupConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr_protection_summary.pagers.ListResourceBackupConfigsAsyncPager", + "shortName": "list_resource_backup_configs" + }, + "description": "Sample for ListResourceBackupConfigs", + "file": "backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDrProtectionSummary_ListResourceBackupConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDrProtectionSummaryClient", + "shortName": "BackupDrProtectionSummaryClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDrProtectionSummaryClient.list_resource_backup_configs", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDrProtectionSummary.ListResourceBackupConfigs", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDrProtectionSummary", + "shortName": "BackupDrProtectionSummary" + }, + "shortName": "ListResourceBackupConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListResourceBackupConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr_protection_summary.pagers.ListResourceBackupConfigsPager", + "shortName": "list_resource_backup_configs" + }, + "description": "Sample for ListResourceBackupConfigs", + "file": "backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDrProtectionSummary_ListResourceBackupConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_protection_summary_list_resource_backup_configs_sync.py" } ] } diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr_protection_summary.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr_protection_summary.py new file mode 100644 index 000000000000..004b3ae35fc2 --- /dev/null +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr_protection_summary.py @@ -0,0 +1,5181 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account + +from google.cloud.backupdr_v1.services.backup_dr_protection_summary import ( + BackupDrProtectionSummaryAsyncClient, + BackupDrProtectionSummaryClient, + pagers, + transports, +) +from google.cloud.backupdr_v1.types import protection_summary + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BackupDrProtectionSummaryClient._get_default_mtls_endpoint(None) is None + assert ( + BackupDrProtectionSummaryClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BackupDrProtectionSummaryClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BackupDrProtectionSummaryClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BackupDrProtectionSummaryClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + BackupDrProtectionSummaryClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + BackupDrProtectionSummaryClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BackupDrProtectionSummaryClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BackupDrProtectionSummaryClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. 
+ # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + BackupDrProtectionSummaryClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert BackupDrProtectionSummaryClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert ( + BackupDrProtectionSummaryClient._use_client_cert_effective() + is False + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BackupDrProtectionSummaryClient._get_client_cert_source(None, False) is None + assert ( + BackupDrProtectionSummaryClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + BackupDrProtectionSummaryClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BackupDrProtectionSummaryClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BackupDrProtectionSummaryClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + BackupDrProtectionSummaryClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryClient), +) +@mock.patch.object( + BackupDrProtectionSummaryAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BackupDrProtectionSummaryClient._DEFAULT_UNIVERSE + default_endpoint = ( + BackupDrProtectionSummaryClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = BackupDrProtectionSummaryClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + 
BackupDrProtectionSummaryClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BackupDrProtectionSummaryClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BackupDrProtectionSummaryClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BackupDrProtectionSummaryClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + BackupDrProtectionSummaryClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == BackupDrProtectionSummaryClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BackupDrProtectionSummaryClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BackupDrProtectionSummaryClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BackupDrProtectionSummaryClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + BackupDrProtectionSummaryClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + BackupDrProtectionSummaryClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + BackupDrProtectionSummaryClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BackupDrProtectionSummaryClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BackupDrProtectionSummaryClient._get_universe_domain(None, None) + == BackupDrProtectionSummaryClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + BackupDrProtectionSummaryClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
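
For readers skimming the generated tests above: the precedence rules pinned down by test__get_api_endpoint and test__get_universe_domain surface through client_options at construction time. A minimal illustrative sketch follows; it is not part of the generated diff, the endpoint hostname is a hypothetical placeholder, and universe_domain on ClientOptions assumes a recent google-api-core.

from google.api_core.client_options import ClientOptions
from google.auth import credentials as ga_credentials
from google.cloud import backupdr_v1

# An explicit api_endpoint always wins over mTLS/universe-domain resolution,
# mirroring the api_override branch of test__get_api_endpoint. The hostname
# below is a placeholder, not a documented endpoint.
client = backupdr_v1.BackupDrProtectionSummaryClient(
    credentials=ga_credentials.AnonymousCredentials(),
    client_options=ClientOptions(api_endpoint="backupdr.example.googleapis.com"),
)

# A client-supplied universe_domain beats the GOOGLE_CLOUD_UNIVERSE_DOMAIN
# environment variable, which in turn beats the googleapis.com default,
# mirroring test__get_universe_domain.
client = backupdr_v1.BackupDrProtectionSummaryClient(
    credentials=ga_credentials.AnonymousCredentials(),
    client_options=ClientOptions(universe_domain="example.com"),
)
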
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BackupDrProtectionSummaryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BackupDrProtectionSummaryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BackupDrProtectionSummaryClient, "grpc"), + (BackupDrProtectionSummaryAsyncClient, "grpc_asyncio"), + (BackupDrProtectionSummaryClient, "rest"), + ], +) +def test_backup_dr_protection_summary_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "backupdr.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://backupdr.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BackupDrProtectionSummaryGrpcTransport, "grpc"), + (transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BackupDrProtectionSummaryRestTransport, "rest"), + ], +) +def test_backup_dr_protection_summary_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BackupDrProtectionSummaryClient, "grpc"), + (BackupDrProtectionSummaryAsyncClient, "grpc_asyncio"), + (BackupDrProtectionSummaryClient, "rest"), + ], +) +def test_backup_dr_protection_summary_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() 
+ with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "backupdr.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://backupdr.googleapis.com" + ) + + +def test_backup_dr_protection_summary_client_get_transport_class(): + transport = BackupDrProtectionSummaryClient.get_transport_class() + available_transports = [ + transports.BackupDrProtectionSummaryGrpcTransport, + transports.BackupDrProtectionSummaryRestTransport, + ] + assert transport in available_transports + + transport = BackupDrProtectionSummaryClient.get_transport_class("grpc") + assert transport == transports.BackupDrProtectionSummaryGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryGrpcTransport, + "grpc", + ), + ( + BackupDrProtectionSummaryAsyncClient, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + BackupDrProtectionSummaryClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryClient), +) +@mock.patch.object( + BackupDrProtectionSummaryAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryAsyncClient), +) +def test_backup_dr_protection_summary_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + BackupDrProtectionSummaryClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + BackupDrProtectionSummaryClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryGrpcTransport, + "grpc", + "true", + ), + ( + BackupDrProtectionSummaryAsyncClient, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryGrpcTransport, + "grpc", + "false", + ), + ( + BackupDrProtectionSummaryAsyncClient, + 
transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryRestTransport, + "rest", + "true", + ), + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + BackupDrProtectionSummaryClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryClient), +) +@mock.patch.object( + BackupDrProtectionSummaryAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_backup_dr_protection_summary_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [BackupDrProtectionSummaryClient, BackupDrProtectionSummaryAsyncClient], +) +@mock.patch.object( + BackupDrProtectionSummaryClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BackupDrProtectionSummaryClient), +) +@mock.patch.object( + BackupDrProtectionSummaryAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BackupDrProtectionSummaryAsyncClient), +) +def test_backup_dr_protection_summary_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
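+    # Taken together (an editorial summary of the assertions in this test,
+    # not generated code):
+    #     "never"  -> DEFAULT_ENDPOINT, cert source is None
+    #     "always" -> DEFAULT_MTLS_ENDPOINT, cert source is None
+    #     "auto"   -> DEFAULT_MTLS_ENDPOINT only if a client cert is found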
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize( + "client_class", + [BackupDrProtectionSummaryClient, BackupDrProtectionSummaryAsyncClient], +) +@mock.patch.object( + BackupDrProtectionSummaryClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryClient), +) +@mock.patch.object( + BackupDrProtectionSummaryAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BackupDrProtectionSummaryAsyncClient), +) +def test_backup_dr_protection_summary_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BackupDrProtectionSummaryClient._DEFAULT_UNIVERSE + default_endpoint = ( + BackupDrProtectionSummaryClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = BackupDrProtectionSummaryClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
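+    # (Side note, hedged: the default host asserted elsewhere in this file is
+    # "backupdr.googleapis.com", so DEFAULT_MTLS_ENDPOINT would follow the
+    # usual "backupdr.mtls.googleapis.com" naming convention.)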
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryGrpcTransport, + "grpc", + ), + ( + BackupDrProtectionSummaryAsyncClient, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryRestTransport, + "rest", + ), + ], +) +def test_backup_dr_protection_summary_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
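+    # Scopes supplied via ClientOptions should be forwarded verbatim to the
+    # transport constructor, e.g. (illustrative user code, not generated):
+    #     options = client_options.ClientOptions(scopes=["1", "2"])
+    #     client = BackupDrProtectionSummaryClient(client_options=options)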
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BackupDrProtectionSummaryAsyncClient, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryRestTransport, + "rest", + None, + ), + ], +) +def test_backup_dr_protection_summary_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_backup_dr_protection_summary_client_client_options_from_dict(): + with mock.patch( + "google.cloud.backupdr_v1.services.backup_dr_protection_summary.transports.BackupDrProtectionSummaryGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BackupDrProtectionSummaryClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BackupDrProtectionSummaryAsyncClient, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_backup_dr_protection_summary_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
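+    # The client should pass the file path through to the transport unopened;
+    # the second half of this test then verifies that
+    # google.auth.load_credentials_from_file supplies the channel credentials.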
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "backupdr.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="backupdr.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + protection_summary.ListResourceBackupConfigsRequest, + dict, + ], +) +def test_list_resource_backup_configs(request_type, transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = protection_summary.ListResourceBackupConfigsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_resource_backup_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = protection_summary.ListResourceBackupConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListResourceBackupConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_resource_backup_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
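+    # (Editorial note: AIP-4235 covers request_id-style fields that clients
+    # auto-populate with a UUID4 when left unset; the plain string fields
+    # below must round-trip unchanged.)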
+ request = protection_summary.ListResourceBackupConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_resource_backup_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == protection_summary.ListResourceBackupConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_resource_backup_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_resource_backup_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_resource_backup_configs + ] = mock_rpc + request = {} + client.list_resource_backup_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_resource_backup_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_resource_backup_configs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_resource_backup_configs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_resource_backup_configs + ] = mock_rpc + + request = {} + await client.list_resource_backup_configs(request) + + # Establish that the underlying gRPC stub method was called. 
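+        # (One call so far; the second invocation below must hit the cached
+        # wrapper rather than re-wrapping, so wrapper_fn stays at zero.)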
+ assert mock_rpc.call_count == 1 + + await client.list_resource_backup_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_resource_backup_configs_async( + transport: str = "grpc_asyncio", + request_type=protection_summary.ListResourceBackupConfigsRequest, +): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + protection_summary.ListResourceBackupConfigsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_resource_backup_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = protection_summary.ListResourceBackupConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListResourceBackupConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_resource_backup_configs_async_from_dict(): + await test_list_resource_backup_configs_async(request_type=dict) + + +def test_list_resource_backup_configs_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = protection_summary.ListResourceBackupConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + call.return_value = protection_summary.ListResourceBackupConfigsResponse() + client.list_resource_backup_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_resource_backup_configs_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = protection_summary.ListResourceBackupConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
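+    # The "x-goog-request-params" header asserted below is how GAPIC clients
+    # convey URI path parameters over gRPC so the backend can route the call.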
+    with mock.patch.object(
+        type(client.transport.list_resource_backup_configs), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            protection_summary.ListResourceBackupConfigsResponse()
+        )
+        await client.list_resource_backup_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            "x-goog-request-params",
+            "parent=parent_value",
+        ) in kw["metadata"]
+
+
+def test_list_resource_backup_configs_flattened():
+    client = BackupDrProtectionSummaryClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_resource_backup_configs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = protection_summary.ListResourceBackupConfigsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_resource_backup_configs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_resource_backup_configs_flattened_error():
+    client = BackupDrProtectionSummaryClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_resource_backup_configs(
+            protection_summary.ListResourceBackupConfigsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_resource_backup_configs_flattened_async():
+    client = BackupDrProtectionSummaryAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_resource_backup_configs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            protection_summary.ListResourceBackupConfigsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_resource_backup_configs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_resource_backup_configs_flattened_error_async():
+    client = BackupDrProtectionSummaryAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
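+    # (Illustrative, not generated:) the two supported calling conventions are
+    #     await client.list_resource_backup_configs(request)        # request object
+    #     await client.list_resource_backup_configs(parent=...)     # flattened args
+    # and mixing them raises ValueError, as asserted below.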
+ with pytest.raises(ValueError): + await client.list_resource_backup_configs( + protection_summary.ListResourceBackupConfigsRequest(), + parent="parent_value", + ) + + +def test_list_resource_backup_configs_pager(transport_name: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + next_page_token="abc", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[], + next_page_token="def", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + ], + next_page_token="ghi", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_resource_backup_configs( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, protection_summary.ResourceBackupConfig) for i in results + ) + + +def test_list_resource_backup_configs_pages(transport_name: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + next_page_token="abc", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[], + next_page_token="def", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + ], + next_page_token="ghi", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_resource_backup_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_resource_backup_configs_async_pager(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
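+    # An async pager is awaited once to obtain it and then consumed with
+    # `async for`; the synchronous pager above is a plain iterable.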
+ with mock.patch.object( + type(client.transport.list_resource_backup_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + next_page_token="abc", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[], + next_page_token="def", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + ], + next_page_token="ghi", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_resource_backup_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, protection_summary.ResourceBackupConfig) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_resource_backup_configs_async_pages(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + next_page_token="abc", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[], + next_page_token="def", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + ], + next_page_token="ghi", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_resource_backup_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_resource_backup_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_resource_backup_configs + in 
client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_resource_backup_configs + ] = mock_rpc + + request = {} + client.list_resource_backup_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_resource_backup_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_resource_backup_configs_rest_required_fields( + request_type=protection_summary.ListResourceBackupConfigsRequest, +): + transport_class = transports.BackupDrProtectionSummaryRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resource_backup_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resource_backup_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = protection_summary.ListResourceBackupConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
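+            # (transcode() normally matches the request against the method's
+            # http rule to produce the uri/method/body/query_params; stubbing
+            # it keeps this test independent of the real URL template.)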
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = protection_summary.ListResourceBackupConfigsResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_resource_backup_configs(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_resource_backup_configs_rest_unset_required_fields():
+    transport = transports.BackupDrProtectionSummaryRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_resource_backup_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "orderBy",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+def test_list_resource_backup_configs_rest_flattened():
+    client = BackupDrProtectionSummaryClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = protection_summary.ListResourceBackupConfigsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = protection_summary.ListResourceBackupConfigsResponse.pb(
+            return_value
+        )
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_resource_backup_configs(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*}/resourceBackupConfigs"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_resource_backup_configs_rest_flattened_error(transport: str = "rest"):
+    client = BackupDrProtectionSummaryClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.list_resource_backup_configs( + protection_summary.ListResourceBackupConfigsRequest(), + parent="parent_value", + ) + + +def test_list_resource_backup_configs_rest_pager(transport: str = "rest"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + next_page_token="abc", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[], + next_page_token="def", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + ], + next_page_token="ghi", + ), + protection_summary.ListResourceBackupConfigsResponse( + resource_backup_configs=[ + protection_summary.ResourceBackupConfig(), + protection_summary.ResourceBackupConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + protection_summary.ListResourceBackupConfigsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_resource_backup_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, protection_summary.ResourceBackupConfig) for i in results + ) + + pages = list(client.list_resource_backup_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BackupDrProtectionSummaryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BackupDrProtectionSummaryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDrProtectionSummaryClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.BackupDrProtectionSummaryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackupDrProtectionSummaryClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackupDrProtectionSummaryClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BackupDrProtectionSummaryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDrProtectionSummaryClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BackupDrProtectionSummaryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BackupDrProtectionSummaryClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BackupDrProtectionSummaryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BackupDrProtectionSummaryGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BackupDrProtectionSummaryGrpcTransport, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + transports.BackupDrProtectionSummaryRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = BackupDrProtectionSummaryClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_resource_backup_configs_empty_call_grpc(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + call.return_value = protection_summary.ListResourceBackupConfigsResponse() + client.list_resource_backup_configs(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = protection_summary.ListResourceBackupConfigsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = BackupDrProtectionSummaryAsyncClient.get_transport_class( + "grpc_asyncio" + )(credentials=async_anonymous_credentials()) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_resource_backup_configs_empty_call_grpc_asyncio(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + protection_summary.ListResourceBackupConfigsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_resource_backup_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = protection_summary.ListResourceBackupConfigsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = BackupDrProtectionSummaryClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_resource_backup_configs_rest_bad_request( + request_type=protection_summary.ListResourceBackupConfigsRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_resource_backup_configs(request) + + +@pytest.mark.parametrize( + "request_type", + [ + protection_summary.ListResourceBackupConfigsRequest, + dict, + ], +) +def test_list_resource_backup_configs_rest_call_success(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
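+        # The fake HTTP body is the JSON encoding of the proto response; the
+        # REST transport is expected to parse it back into the message type.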
+ return_value = protection_summary.ListResourceBackupConfigsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = protection_summary.ListResourceBackupConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_resource_backup_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListResourceBackupConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_resource_backup_configs_rest_interceptors(null_interceptor): + transport = transports.BackupDrProtectionSummaryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackupDrProtectionSummaryRestInterceptor(), + ) + client = BackupDrProtectionSummaryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDrProtectionSummaryRestInterceptor, + "post_list_resource_backup_configs", + ) as post, mock.patch.object( + transports.BackupDrProtectionSummaryRestInterceptor, + "post_list_resource_backup_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BackupDrProtectionSummaryRestInterceptor, + "pre_list_resource_backup_configs", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = protection_summary.ListResourceBackupConfigsRequest.pb( + protection_summary.ListResourceBackupConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = protection_summary.ListResourceBackupConfigsResponse.to_json( + protection_summary.ListResourceBackupConfigsResponse() + ) + req.return_value.content = return_value + + request = protection_summary.ListResourceBackupConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = protection_summary.ListResourceBackupConfigsResponse() + post_with_metadata.return_value = ( + protection_summary.ListResourceBackupConfigsResponse(), + metadata, + ) + + client.list_resource_backup_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
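+    # A 400 status with an empty JSON body should surface as
+    # core_exceptions.BadRequest; the same pattern repeats for the other
+    # mixin methods below.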
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/managementServers/sample3"}, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/managementServers/sample3"}, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/managementServers/sample3"}, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
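+        # The proto is serialized to JSON below and handed back as the raw
+        # response body, mirroring what the REST transport would receive.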
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_resource_backup_configs_empty_call_rest(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_backup_configs), "__call__" + ) as call: + client.list_resource_backup_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = protection_summary.ListResourceBackupConfigsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BackupDrProtectionSummaryGrpcTransport, + ) + + +def test_backup_dr_protection_summary_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BackupDrProtectionSummaryTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_backup_dr_protection_summary_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.backupdr_v1.services.backup_dr_protection_summary.transports.BackupDrProtectionSummaryTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BackupDrProtectionSummaryTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_resource_backup_configs", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_backup_dr_protection_summary_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.backupdr_v1.services.backup_dr_protection_summary.transports.BackupDrProtectionSummaryTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BackupDrProtectionSummaryTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_backup_dr_protection_summary_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.backupdr_v1.services.backup_dr_protection_summary.transports.BackupDrProtectionSummaryTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BackupDrProtectionSummaryTransport() + adc.assert_called_once() + + +def test_backup_dr_protection_summary_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
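+    # google.auth.default is patched, so no real credential lookup happens;
+    # the assertion below verifies that the scopes and quota project are
+    # forwarded to ADC.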
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BackupDrProtectionSummaryClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BackupDrProtectionSummaryGrpcTransport, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + ], +) +def test_backup_dr_protection_summary_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BackupDrProtectionSummaryGrpcTransport, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + transports.BackupDrProtectionSummaryRestTransport, + ], +) +def test_backup_dr_protection_summary_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BackupDrProtectionSummaryGrpcTransport, grpc_helpers), + (transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_backup_dr_protection_summary_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "backupdr.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="backupdr.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BackupDrProtectionSummaryGrpcTransport, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + ], +) +def test_backup_dr_protection_summary_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
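+    # An explicit ssl_channel_credentials argument takes precedence over
+    # client_cert_source_for_mtls, which is exercised further below.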
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_backup_dr_protection_summary_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BackupDrProtectionSummaryRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_backup_dr_protection_summary_host_no_port(transport_name): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="backupdr.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "backupdr.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://backupdr.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_backup_dr_protection_summary_host_with_port(transport_name): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="backupdr.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "backupdr.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://backupdr.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_backup_dr_protection_summary_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BackupDrProtectionSummaryClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BackupDrProtectionSummaryClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_resource_backup_configs._session + session2 = client2.transport.list_resource_backup_configs._session + assert session1 != session2 + + +def test_backup_dr_protection_summary_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
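+    # A caller-supplied channel is adopted as-is, so no ssl credentials are
+    # recorded on the transport.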
+    transport = transports.BackupDrProtectionSummaryGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_backup_dr_protection_summary_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.BackupDrProtectionSummaryGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.filterwarnings("ignore::FutureWarning")
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.BackupDrProtectionSummaryGrpcTransport,
+        transports.BackupDrProtectionSummaryGrpcAsyncIOTransport,
+    ],
+)
+def test_backup_dr_protection_summary_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
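+# When client_cert_source is None, the channel is expected to fall back to
+# the ADC-provided SslCredentials for the mTLS endpoint (mocked below).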
+@pytest.mark.parametrize( + "transport_class", + [ + transports.BackupDrProtectionSummaryGrpcTransport, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + ], +) +def test_backup_dr_protection_summary_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_resource_backup_config_path(): + project = "squid" + location = "clam" + resource_backup_config = "whelk" + expected = "projects/{project}/locations/{location}/resourceBackupConfigs/{resource_backup_config}".format( + project=project, + location=location, + resource_backup_config=resource_backup_config, + ) + actual = BackupDrProtectionSummaryClient.resource_backup_config_path( + project, location, resource_backup_config + ) + assert expected == actual + + +def test_parse_resource_backup_config_path(): + expected = { + "project": "octopus", + "location": "oyster", + "resource_backup_config": "nudibranch", + } + path = BackupDrProtectionSummaryClient.resource_backup_config_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDrProtectionSummaryClient.parse_resource_backup_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BackupDrProtectionSummaryClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = BackupDrProtectionSummaryClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDrProtectionSummaryClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BackupDrProtectionSummaryClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = BackupDrProtectionSummaryClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
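+    # The parse_* helpers invert the template via a regular expression and
+    # return the captured components as a dict.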
+ actual = BackupDrProtectionSummaryClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BackupDrProtectionSummaryClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = BackupDrProtectionSummaryClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDrProtectionSummaryClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = BackupDrProtectionSummaryClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = BackupDrProtectionSummaryClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDrProtectionSummaryClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BackupDrProtectionSummaryClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = BackupDrProtectionSummaryClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDrProtectionSummaryClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BackupDrProtectionSummaryTransport, "_prep_wrapped_messages" + ) as prep: + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BackupDrProtectionSummaryTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BackupDrProtectionSummaryClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
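+    # DeleteOperation has no payload (google.protobuf.Empty), so the client
+    # surfaces it as None.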
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
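+        # Routing is driven by the x-goog-request-params entry that the
+        # client injects into the call's gRPC metadata.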
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
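+        # The async surface awaits the stub, so the proto is wrapped in a
+        # FakeUnaryUnaryCall to make the mocked return value awaitable.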
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = BackupDrProtectionSummaryClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = BackupDrProtectionSummaryAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = BackupDrProtectionSummaryClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = BackupDrProtectionSummaryAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
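+        # A plain dict request is coerced into an
+        # iam_policy_pb2.SetIamPolicyRequest by the client before the stub
+        # is invoked.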
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
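+    # kw["metadata"] carries the (key, value) pairs that would be sent as gRPC
+    # metadata; the x-goog-request-params routing header is asserted below.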
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = BackupDrProtectionSummaryAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BackupDrProtectionSummaryClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
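+        # Exiting the context manager should close the transport; the loop
+        # exercises both the REST and gRPC transports.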
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + BackupDrProtectionSummaryClient, + transports.BackupDrProtectionSummaryGrpcTransport, + ), + ( + BackupDrProtectionSummaryAsyncClient, + transports.BackupDrProtectionSummaryGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-config/google/cloud/config/__init__.py b/packages/google-cloud-config/google/cloud/config/__init__.py index f07c37a361c5..287b89353973 100644 --- a/packages/google-cloud-config/google/cloud/config/__init__.py +++ b/packages/google-cloud-config/google/cloud/config/__init__.py @@ -22,6 +22,7 @@ from google.cloud.config_v1.services.config.client import ConfigClient from google.cloud.config_v1.types.config import ( ApplyResults, + AutoMigrationConfig, CreateDeploymentRequest, CreatePreviewRequest, DeleteDeploymentRequest, @@ -34,6 +35,7 @@ ExportPreviewResultRequest, ExportPreviewResultResponse, ExportRevisionStatefileRequest, + GetAutoMigrationConfigRequest, GetDeploymentRequest, GetPreviewRequest, GetResourceChangeRequest, @@ -83,6 +85,7 @@ TerraformVariable, TerraformVersion, UnlockDeploymentRequest, + UpdateAutoMigrationConfigRequest, UpdateDeploymentRequest, ) @@ -90,6 +93,7 @@ "ConfigClient", "ConfigAsyncClient", "ApplyResults", + "AutoMigrationConfig", "CreateDeploymentRequest", "CreatePreviewRequest", "DeleteDeploymentRequest", @@ -102,6 +106,7 @@ "ExportPreviewResultRequest", "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", + "GetAutoMigrationConfigRequest", "GetDeploymentRequest", "GetPreviewRequest", "GetResourceChangeRequest", @@ -150,6 +155,7 @@ "TerraformVariable", "TerraformVersion", "UnlockDeploymentRequest", + "UpdateAutoMigrationConfigRequest", "UpdateDeploymentRequest", "QuotaValidation", ) diff --git a/packages/google-cloud-config/google/cloud/config_v1/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/__init__.py index b26697efabe4..70a54ee1fe28 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/__init__.py @@ -31,6 +31,7 @@ from .services.config import ConfigAsyncClient, ConfigClient from .types.config import ( ApplyResults, + AutoMigrationConfig, CreateDeploymentRequest, CreatePreviewRequest, DeleteDeploymentRequest, @@ -43,6 +44,7 @@ ExportPreviewResultRequest, ExportPreviewResultResponse, ExportRevisionStatefileRequest, + GetAutoMigrationConfigRequest, GetDeploymentRequest, GetPreviewRequest, GetResourceChangeRequest, @@ -92,6 +94,7 @@ TerraformVariable, TerraformVersion, 
UnlockDeploymentRequest, + UpdateAutoMigrationConfigRequest, UpdateDeploymentRequest, ) @@ -192,6 +195,7 @@ def _get_version(dependency_name): __all__ = ( "ConfigAsyncClient", "ApplyResults", + "AutoMigrationConfig", "ConfigClient", "CreateDeploymentRequest", "CreatePreviewRequest", @@ -205,6 +209,7 @@ def _get_version(dependency_name): "ExportPreviewResultRequest", "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", + "GetAutoMigrationConfigRequest", "GetDeploymentRequest", "GetPreviewRequest", "GetResourceChangeRequest", @@ -254,5 +259,6 @@ def _get_version(dependency_name): "TerraformVariable", "TerraformVersion", "UnlockDeploymentRequest", + "UpdateAutoMigrationConfigRequest", "UpdateDeploymentRequest", ) diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json index fd2c8bc6dd14..746d54e27899 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json @@ -55,6 +55,11 @@ "export_revision_statefile" ] }, + "GetAutoMigrationConfig": { + "methods": [ + "get_auto_migration_config" + ] + }, "GetDeployment": { "methods": [ "get_deployment" @@ -140,6 +145,11 @@ "unlock_deployment" ] }, + "UpdateAutoMigrationConfig": { + "methods": [ + "update_auto_migration_config" + ] + }, "UpdateDeployment": { "methods": [ "update_deployment" @@ -195,6 +205,11 @@ "export_revision_statefile" ] }, + "GetAutoMigrationConfig": { + "methods": [ + "get_auto_migration_config" + ] + }, "GetDeployment": { "methods": [ "get_deployment" @@ -280,6 +295,11 @@ "unlock_deployment" ] }, + "UpdateAutoMigrationConfig": { + "methods": [ + "update_auto_migration_config" + ] + }, "UpdateDeployment": { "methods": [ "update_deployment" @@ -335,6 +355,11 @@ "export_revision_statefile" ] }, + "GetAutoMigrationConfig": { + "methods": [ + "get_auto_migration_config" + ] + }, "GetDeployment": { "methods": [ "get_deployment" @@ -420,6 +445,11 @@ "unlock_deployment" ] }, + "UpdateAutoMigrationConfig": { + "methods": [ + "update_auto_migration_config" + ] + }, "UpdateDeployment": { "methods": [ "update_deployment" diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py index 9cf1d700785b..799e2a9596ee 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import uuid from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -86,6 +87,10 @@ class ConfigAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = ConfigClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = ConfigClient._DEFAULT_UNIVERSE + auto_migration_config_path = staticmethod(ConfigClient.auto_migration_config_path) + parse_auto_migration_config_path = staticmethod( + ConfigClient.parse_auto_migration_config_path + ) deployment_path = staticmethod(ConfigClient.deployment_path) parse_deployment_path = staticmethod(ConfigClient.parse_deployment_path) preview_path = staticmethod(ConfigClient.preview_path) @@ -3540,6 +3545,258 @@ async def sample_get_resource_drift(): # Done; return the response. 
return response + async def get_auto_migration_config( + self, + request: Optional[Union[config.GetAutoMigrationConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.AutoMigrationConfig: + r"""Get the AutoMigrationConfig for a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_auto_migration_config(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetAutoMigrationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_auto_migration_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetAutoMigrationConfigRequest, dict]]): + The request object. The request message for the + GetAutoMigrationConfig method. + name (:class:`str`): + Required. The name of the AutoMigrationConfig. Format: + 'projects/{project_id}/locations/{location}/AutoMigrationConfig'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.types.AutoMigrationConfig: + AutoMigrationConfig contains the + automigration configuration for a + project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.GetAutoMigrationConfigRequest): + request = config.GetAutoMigrationConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_auto_migration_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
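+        # For a (hypothetical) resource name such as
+        # "projects/my-project/locations/us-central1/autoMigrationConfig",
+        # this produces the routing header:
+        #   x-goog-request-params: name=projects/my-project/locations/us-central1/autoMigrationConfig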
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_auto_migration_config( + self, + request: Optional[Union[config.UpdateAutoMigrationConfigRequest, dict]] = None, + *, + auto_migration_config: Optional[config.AutoMigrationConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the AutoMigrationConfig for a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_update_auto_migration_config(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.UpdateAutoMigrationConfigRequest( + ) + + # Make the request + operation = client.update_auto_migration_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.UpdateAutoMigrationConfigRequest, dict]]): + The request object. The request message for the + UpdateAutoMigrationConfig method. + auto_migration_config (:class:`google.cloud.config_v1.types.AutoMigrationConfig`): + Required. The AutoMigrationConfig to + update. + + This corresponds to the ``auto_migration_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The update mask applies to the resource. See + [google.protobuf.FieldMask][google.protobuf.FieldMask]. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.config_v1.types.AutoMigrationConfig` + AutoMigrationConfig contains the automigration + configuration for a project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
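+        # The flattened fields (auto_migration_config, update_mask) and the
+        # request object are mutually exclusive; supplying both raises a
+        # ValueError below.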
+        flattened_params = [auto_migration_config, update_mask]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, config.UpdateAutoMigrationConfigRequest):
+            request = config.UpdateAutoMigrationConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if auto_migration_config is not None:
+            request.auto_migration_config = auto_migration_config
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.update_auto_migration_config
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("auto_migration_config.name", request.auto_migration_config.name),)
+            ),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            config.AutoMigrationConfig,
+            metadata_type=config.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_operations(
+        self,
+        request: Optional[operations_pb2.ListOperationsRequest] = None,
diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py
index 79220996987d..a05a7cbc1368 100644
--- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py
+++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py
@@ -32,6 +32,7 @@
     Union,
     cast,
 )
+import uuid
 import warnings
 
 from google.api_core import client_options as client_options_lib
@@ -235,6 +236,26 @@ def transport(self) -> ConfigTransport:
         """
         return self._transport
 
+    @staticmethod
+    def auto_migration_config_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified auto_migration_config string."""
+        return "projects/{project}/locations/{location}/autoMigrationConfig".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_auto_migration_config_path(path: str) -> Dict[str, str]:
+        """Parses an auto_migration_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autoMigrationConfig$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def deployment_path(
         project: str,
@@ -4082,6 +4103,256 @@ def sample_get_resource_drift():
         # Done; return the response.
return response + def get_auto_migration_config( + self, + request: Optional[Union[config.GetAutoMigrationConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.AutoMigrationConfig: + r"""Get the AutoMigrationConfig for a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_auto_migration_config(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetAutoMigrationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_auto_migration_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetAutoMigrationConfigRequest, dict]): + The request object. The request message for the + GetAutoMigrationConfig method. + name (str): + Required. The name of the AutoMigrationConfig. Format: + 'projects/{project_id}/locations/{location}/AutoMigrationConfig'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.types.AutoMigrationConfig: + AutoMigrationConfig contains the + automigration configuration for a + project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.GetAutoMigrationConfigRequest): + request = config.GetAutoMigrationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_auto_migration_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
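+        # The routing header mirrors request.name, which is populated either
+        # directly on the request or via the flattened `name` argument above.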
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_auto_migration_config( + self, + request: Optional[Union[config.UpdateAutoMigrationConfigRequest, dict]] = None, + *, + auto_migration_config: Optional[config.AutoMigrationConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the AutoMigrationConfig for a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_update_auto_migration_config(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.UpdateAutoMigrationConfigRequest( + ) + + # Make the request + operation = client.update_auto_migration_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.UpdateAutoMigrationConfigRequest, dict]): + The request object. The request message for the + UpdateAutoMigrationConfig method. + auto_migration_config (google.cloud.config_v1.types.AutoMigrationConfig): + Required. The AutoMigrationConfig to + update. + + This corresponds to the ``auto_migration_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The update mask applies to the resource. See + [google.protobuf.FieldMask][google.protobuf.FieldMask]. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.config_v1.types.AutoMigrationConfig` + AutoMigrationConfig contains the automigration + configuration for a project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [auto_migration_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.UpdateAutoMigrationConfigRequest): + request = config.UpdateAutoMigrationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if auto_migration_config is not None: + request.auto_migration_config = auto_migration_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_auto_migration_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("auto_migration_config.name", request.auto_migration_config.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.AutoMigrationConfig, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "ConfigClient": return self diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py index 3782c14bf6e8..99bb7ba773c7 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py @@ -273,6 +273,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_auto_migration_config: gapic_v1.method.wrap_method( + self.get_auto_migration_config, + default_timeout=None, + client_info=client_info, + ), + self.update_auto_migration_config: gapic_v1.method.wrap_method( + self.update_auto_migration_config, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -588,6 +598,24 @@ def get_resource_drift( ]: raise NotImplementedError() + @property + def get_auto_migration_config( + self, + ) -> Callable[ + [config.GetAutoMigrationConfigRequest], + Union[config.AutoMigrationConfig, Awaitable[config.AutoMigrationConfig]], + ]: + raise NotImplementedError() + + @property + def update_auto_migration_config( + self, + ) -> Callable[ + [config.UpdateAutoMigrationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py index 713d2065786a..f36f6f00f994 100644 --- 
a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py @@ -1063,6 +1063,62 @@ def get_resource_drift( ) return self._stubs["get_resource_drift"] + @property + def get_auto_migration_config( + self, + ) -> Callable[[config.GetAutoMigrationConfigRequest], config.AutoMigrationConfig]: + r"""Return a callable for the get auto migration config method over gRPC. + + Get the AutoMigrationConfig for a given project and + location. + + Returns: + Callable[[~.GetAutoMigrationConfigRequest], + ~.AutoMigrationConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_auto_migration_config" not in self._stubs: + self._stubs["get_auto_migration_config"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/GetAutoMigrationConfig", + request_serializer=config.GetAutoMigrationConfigRequest.serialize, + response_deserializer=config.AutoMigrationConfig.deserialize, + ) + return self._stubs["get_auto_migration_config"] + + @property + def update_auto_migration_config( + self, + ) -> Callable[[config.UpdateAutoMigrationConfigRequest], operations_pb2.Operation]: + r"""Return a callable for the update auto migration config method over gRPC. + + Updates the AutoMigrationConfig for a given project + and location. + + Returns: + Callable[[~.UpdateAutoMigrationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_auto_migration_config" not in self._stubs: + self._stubs[ + "update_auto_migration_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/UpdateAutoMigrationConfig", + request_serializer=config.UpdateAutoMigrationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_auto_migration_config"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py index 393db373d3fe..5c46dbe7b254 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py @@ -1098,6 +1098,66 @@ def get_resource_drift( ) return self._stubs["get_resource_drift"] + @property + def get_auto_migration_config( + self, + ) -> Callable[ + [config.GetAutoMigrationConfigRequest], Awaitable[config.AutoMigrationConfig] + ]: + r"""Return a callable for the get auto migration config method over gRPC. + + Get the AutoMigrationConfig for a given project and + location. + + Returns: + Callable[[~.GetAutoMigrationConfigRequest], + Awaitable[~.AutoMigrationConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
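+        # The callable is created once and cached in self._stubs, so repeated
+        # property accesses reuse the same stub.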
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_auto_migration_config" not in self._stubs: + self._stubs["get_auto_migration_config"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/GetAutoMigrationConfig", + request_serializer=config.GetAutoMigrationConfigRequest.serialize, + response_deserializer=config.AutoMigrationConfig.deserialize, + ) + return self._stubs["get_auto_migration_config"] + + @property + def update_auto_migration_config( + self, + ) -> Callable[ + [config.UpdateAutoMigrationConfigRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update auto migration config method over gRPC. + + Updates the AutoMigrationConfig for a given project + and location. + + Returns: + Callable[[~.UpdateAutoMigrationConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_auto_migration_config" not in self._stubs: + self._stubs[ + "update_auto_migration_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/UpdateAutoMigrationConfig", + request_serializer=config.UpdateAutoMigrationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_auto_migration_config"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1236,6 +1296,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_auto_migration_config: self._wrap_method( + self.get_auto_migration_config, + default_timeout=None, + client_info=client_info, + ), + self.update_auto_migration_config: self._wrap_method( + self.update_auto_migration_config, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py index 323eb5c65331..d163555b639a 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py @@ -145,6 +145,14 @@ def post_export_revision_statefile(self, response): logging.log(f"Received response: {response}") return response + def pre_get_auto_migration_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_auto_migration_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_deployment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -281,6 +289,14 @@ def post_unlock_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_update_auto_migration_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_auto_migration_config(self, response): + logging.log(f"Received response: {response}") + return response + def 
pre_update_deployment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -681,6 +697,54 @@ def post_export_revision_statefile_with_metadata( """ return response, metadata + def pre_get_auto_migration_config( + self, + request: config.GetAutoMigrationConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.GetAutoMigrationConfigRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_auto_migration_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_auto_migration_config( + self, response: config.AutoMigrationConfig + ) -> config.AutoMigrationConfig: + """Post-rpc interceptor for get_auto_migration_config + + DEPRECATED. Please use the `post_get_auto_migration_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_get_auto_migration_config` interceptor runs + before the `post_get_auto_migration_config_with_metadata` interceptor. + """ + return response + + def post_get_auto_migration_config_with_metadata( + self, + response: config.AutoMigrationConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.AutoMigrationConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_auto_migration_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_auto_migration_config_with_metadata` + interceptor in new development instead of the `post_get_auto_migration_config` interceptor. + When both interceptors are used, this `post_get_auto_migration_config_with_metadata` interceptor runs after the + `post_get_auto_migration_config` interceptor. The (possibly modified) response returned by + `post_get_auto_migration_config` will be passed to + `post_get_auto_migration_config_with_metadata`. + """ + return response, metadata + def pre_get_deployment( self, request: config.GetDeploymentRequest, @@ -1469,6 +1533,54 @@ def post_unlock_deployment_with_metadata( """ return response, metadata + def pre_update_auto_migration_config( + self, + request: config.UpdateAutoMigrationConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.UpdateAutoMigrationConfigRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_auto_migration_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_update_auto_migration_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_auto_migration_config + + DEPRECATED. Please use the `post_update_auto_migration_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_update_auto_migration_config` interceptor runs + before the `post_update_auto_migration_config_with_metadata` interceptor. 
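+
+        A minimal sketch of overriding this hook (the subclass name is
+        illustrative; prefer the ``_with_metadata`` variant in new code)::
+
+            class LoggingConfigInterceptor(ConfigRestInterceptor):
+                def post_update_auto_migration_config(self, response):
+                    print(f"LRO for AutoMigrationConfig update: {response.name}")
+                    return response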
+ """ + return response + + def post_update_auto_migration_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_auto_migration_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_update_auto_migration_config_with_metadata` + interceptor in new development instead of the `post_update_auto_migration_config` interceptor. + When both interceptors are used, this `post_update_auto_migration_config_with_metadata` interceptor runs after the + `post_update_auto_migration_config` interceptor. The (possibly modified) response returned by + `post_update_auto_migration_config` will be passed to + `post_update_auto_migration_config_with_metadata`. + """ + return response, metadata + def pre_update_deployment( self, request: config.UpdateDeploymentRequest, @@ -3226,6 +3338,156 @@ def __call__( ) return resp + class _GetAutoMigrationConfig( + _BaseConfigRestTransport._BaseGetAutoMigrationConfig, ConfigRestStub + ): + def __hash__(self): + return hash("ConfigRestTransport.GetAutoMigrationConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: config.GetAutoMigrationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.AutoMigrationConfig: + r"""Call the get auto migration config method over HTTP. + + Args: + request (~.config.GetAutoMigrationConfigRequest): + The request object. The request message for the + GetAutoMigrationConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.config.AutoMigrationConfig: + AutoMigrationConfig contains the + automigration configuration for a + project. 
+
+            """
+
+            http_options = (
+                _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_http_options()
+            )
+
+            request, metadata = self._interceptor.pre_get_auto_migration_config(
+                request, metadata
+            )
+            transcoded_request = _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_transcoded_request(
+                http_options, request
+            )
+
+            # Jsonify the query params
+            query_params = _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_query_params_json(
+                transcoded_request
+            )
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(
+                    host=self._host, uri=transcoded_request["uri"]
+                )
+                method = transcoded_request["method"]
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.config_v1.ConfigClient.GetAutoMigrationConfig",
+                    extra={
+                        "serviceName": "google.cloud.config.v1.Config",
+                        "rpcName": "GetAutoMigrationConfig",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = ConfigRestTransport._GetAutoMigrationConfig._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = config.AutoMigrationConfig()
+            pb_resp = config.AutoMigrationConfig.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_auto_migration_config(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_auto_migration_config_with_metadata(
+                resp, response_metadata
+            )
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                try:
+                    # Log the parsed message (resp), not the raw HTTP response object.
+                    response_payload = config.AutoMigrationConfig.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.config_v1.ConfigClient.get_auto_migration_config",
+                    extra={
+                        "serviceName": "google.cloud.config.v1.Config",
+                        "rpcName": "GetAutoMigrationConfig",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
     class _GetDeployment(_BaseConfigRestTransport._BaseGetDeployment, ConfigRestStub):
         def __hash__(self):
             return hash("ConfigRestTransport.GetDeployment")
@@ -5805,6 +6067,161 @@ def __call__(
             )
             return resp
 
+    class _UpdateAutoMigrationConfig(
+        _BaseConfigRestTransport._BaseUpdateAutoMigrationConfig, ConfigRestStub
+    ):
+        def __hash__(self):
+            return hash("ConfigRestTransport.UpdateAutoMigrationConfig")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params,
strict=True), + data=body, + ) + return response + + def __call__( + self, + request: config.UpdateAutoMigrationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update auto migration + config method over HTTP. + + Args: + request (~.config.UpdateAutoMigrationConfigRequest): + The request object. The request message for the + UpdateAutoMigrationConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseConfigRestTransport._BaseUpdateAutoMigrationConfig._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_auto_migration_config( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseUpdateAutoMigrationConfig._get_transcoded_request( + http_options, request + ) + + body = _BaseConfigRestTransport._BaseUpdateAutoMigrationConfig._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseConfigRestTransport._BaseUpdateAutoMigrationConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.UpdateAutoMigrationConfig", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "UpdateAutoMigrationConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._UpdateAutoMigrationConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
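+            # For example, an HTTP 404 surfaces as core_exceptions.NotFound and an
+            # HTTP 403 as core_exceptions.PermissionDenied, so callers can catch
+            # the specific GoogleAPICallError subclass they care about.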
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_auto_migration_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_auto_migration_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.update_auto_migration_config", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "UpdateAutoMigrationConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _UpdateDeployment( _BaseConfigRestTransport._BaseUpdateDeployment, ConfigRestStub ): @@ -6038,6 +6455,14 @@ def export_revision_statefile( # In C++ this would require a dynamic_cast return self._ExportRevisionStatefile(self._session, self._host, self._interceptor) # type: ignore + @property + def get_auto_migration_config( + self, + ) -> Callable[[config.GetAutoMigrationConfigRequest], config.AutoMigrationConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAutoMigrationConfig(self._session, self._host, self._interceptor) # type: ignore + @property def get_deployment( self, @@ -6174,6 +6599,14 @@ def unlock_deployment( # In C++ this would require a dynamic_cast return self._UnlockDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def update_auto_migration_config( + self, + ) -> Callable[[config.UpdateAutoMigrationConfigRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateAutoMigrationConfig(self._session, self._host, self._interceptor) # type: ignore + @property def update_deployment( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py index dafd98fb5249..b97d6c05063d 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py @@ -577,6 +577,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetAutoMigrationConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/autoMigrationConfig}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.GetAutoMigrationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetDeployment: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1406,6 +1453,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateAutoMigrationConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{auto_migration_config.name=projects/*/locations/*/autoMigrationConfig}", + "body": "auto_migration_config", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.UpdateAutoMigrationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + 
query_params.update( + _BaseConfigRestTransport._BaseUpdateAutoMigrationConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateDeployment: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py index f672a503cc09..e06713bdc121 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py @@ -15,6 +15,7 @@ # from .config import ( ApplyResults, + AutoMigrationConfig, CreateDeploymentRequest, CreatePreviewRequest, DeleteDeploymentRequest, @@ -27,6 +28,7 @@ ExportPreviewResultRequest, ExportPreviewResultResponse, ExportRevisionStatefileRequest, + GetAutoMigrationConfigRequest, GetDeploymentRequest, GetPreviewRequest, GetResourceChangeRequest, @@ -76,11 +78,13 @@ TerraformVariable, TerraformVersion, UnlockDeploymentRequest, + UpdateAutoMigrationConfigRequest, UpdateDeploymentRequest, ) __all__ = ( "ApplyResults", + "AutoMigrationConfig", "CreateDeploymentRequest", "CreatePreviewRequest", "DeleteDeploymentRequest", @@ -93,6 +97,7 @@ "ExportPreviewResultRequest", "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", + "GetAutoMigrationConfigRequest", "GetDeploymentRequest", "GetPreviewRequest", "GetResourceChangeRequest", @@ -141,6 +146,7 @@ "TerraformVariable", "TerraformVersion", "UnlockDeploymentRequest", + "UpdateAutoMigrationConfigRequest", "UpdateDeploymentRequest", "QuotaValidation", ) diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/config.py b/packages/google-cloud-config/google/cloud/config_v1/types/config.py index fc9a71020e51..319cdc5790ec 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/config.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/config.py @@ -89,6 +89,9 @@ "ListResourceDriftsResponse", "GetResourceDriftRequest", "ProviderConfig", + "GetAutoMigrationConfigRequest", + "AutoMigrationConfig", + "UpdateAutoMigrationConfigRequest", }, ) @@ -301,6 +304,9 @@ class ErrorCode(proto.Enum): BUCKET_CREATION_FAILED (8): Cloud Storage bucket creation failed due to an issue unrelated to permissions. + EXTERNAL_VALUE_SOURCE_IMPORT_FAILED (10): + Failed to import values from an external + source. """ ERROR_CODE_UNSPECIFIED = 0 REVISION_FAILED = 1 @@ -309,6 +315,7 @@ class ErrorCode(proto.Enum): DELETE_BUILD_RUN_FAILED = 6 BUCKET_CREATION_PERMISSION_DENIED = 7 BUCKET_CREATION_FAILED = 8 + EXTERNAL_VALUE_SOURCE_IMPORT_FAILED = 10 class LockState(proto.Enum): r"""Possible lock states of a deployment. @@ -1234,12 +1241,16 @@ class ErrorCode(proto.Enum): QUOTA_VALIDATION_FAILED (7): quota validation failed for one or more resources in terraform configuration files. + EXTERNAL_VALUE_SOURCE_IMPORT_FAILED (8): + Failed to import values from an external + source. """ ERROR_CODE_UNSPECIFIED = 0 CLOUD_BUILD_PERMISSION_DENIED = 1 APPLY_BUILD_API_FAILED = 4 APPLY_BUILD_RUN_FAILED = 5 QUOTA_VALIDATION_FAILED = 7 + EXTERNAL_VALUE_SOURCE_IMPORT_FAILED = 8 terraform_blueprint: "TerraformBlueprint" = proto.Field( proto.MESSAGE, @@ -2154,6 +2165,9 @@ class ErrorCode(proto.Enum): PREVIEW_BUILD_RUN_FAILED (6): Preview created a build but build failed and logs were generated. 
+ EXTERNAL_VALUE_SOURCE_IMPORT_FAILED (7): + Failed to import values from an external + source. """ ERROR_CODE_UNSPECIFIED = 0 CLOUD_BUILD_PERMISSION_DENIED = 1 @@ -2162,6 +2176,7 @@ class ErrorCode(proto.Enum): DEPLOYMENT_LOCK_ACQUIRE_FAILED = 4 PREVIEW_BUILD_API_FAILED = 5 PREVIEW_BUILD_RUN_FAILED = 6 + EXTERNAL_VALUE_SOURCE_IMPORT_FAILED = 7 terraform_blueprint: "TerraformBlueprint" = proto.Field( proto.MESSAGE, @@ -3314,4 +3329,73 @@ class ProviderSource(proto.Enum): ) +class GetAutoMigrationConfigRequest(proto.Message): + r"""The request message for the GetAutoMigrationConfig method. + + Attributes: + name (str): + Required. The name of the AutoMigrationConfig. Format: + 'projects/{project_id}/locations/{location}/AutoMigrationConfig'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AutoMigrationConfig(proto.Message): + r"""AutoMigrationConfig contains the automigration configuration + for a project. + + Attributes: + name (str): + Identifier. The name of the AutoMigrationConfig. Format: + 'projects/{project_id}/locations/{location}/AutoMigrationConfig'. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the AutoMigrationConfig was + last updated. + auto_migration_enabled (bool): + Optional. Whether the auto migration is + enabled for the project. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + auto_migration_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateAutoMigrationConfigRequest(proto.Message): + r"""The request message for the UpdateAutoMigrationConfig method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The update mask applies to the resource. See + [google.protobuf.FieldMask][google.protobuf.FieldMask]. + auto_migration_config (google.cloud.config_v1.types.AutoMigrationConfig): + Required. The AutoMigrationConfig to update. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + auto_migration_config: "AutoMigrationConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="AutoMigrationConfig", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_auto_migration_config_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_auto_migration_config_async.py new file mode 100644 index 000000000000..cb92cd4633bf --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_auto_migration_config_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetAutoMigrationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetAutoMigrationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_auto_migration_config(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetAutoMigrationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_auto_migration_config(request=request) + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_GetAutoMigrationConfig_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_auto_migration_config_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_auto_migration_config_sync.py new file mode 100644 index 000000000000..9ac0616af8d0 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_auto_migration_config_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutoMigrationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetAutoMigrationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
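+#   For instance, the "name_value" placeholder below stands in for a full
+#   resource name such as
+#   "projects/my-project/locations/us-central1/autoMigrationConfig"
+#   (an illustrative path, not a real resource).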
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_auto_migration_config(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetAutoMigrationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_auto_migration_config(request=request) + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_GetAutoMigrationConfig_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_auto_migration_config_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_auto_migration_config_async.py new file mode 100644 index 000000000000..835c91bbcc8d --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_auto_migration_config_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAutoMigrationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_UpdateAutoMigrationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
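+#   For example, a populated request might look like this (illustrative
+#   values only):
+#       config_v1.UpdateAutoMigrationConfigRequest(
+#           auto_migration_config=config_v1.AutoMigrationConfig(
+#               name="projects/my-project/locations/us-central1/autoMigrationConfig",
+#               auto_migration_enabled=True,
+#           ),
+#       )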
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import config_v1


async def sample_update_auto_migration_config():
    # Create a client
    client = config_v1.ConfigAsyncClient()

    # Initialize request argument(s)
    request = config_v1.UpdateAutoMigrationConfigRequest()

    # Make the request
    operation = client.update_auto_migration_config(request=request)

    print("Waiting for operation to complete...")

    response = await (await operation).result()

    # Handle the response
    print(response)


# [END config_v1_generated_Config_UpdateAutoMigrationConfig_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_auto_migration_config_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_auto_migration_config_sync.py new file mode 100644 index 000000000000..1bf9f9251d3d --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_auto_migration_config_sync.py @@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateAutoMigrationConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-config


+# [START config_v1_generated_Config_UpdateAutoMigrationConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
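+#   Note: operation.result() below polls the service and blocks until the
+#   long-running operation completes, raising a GoogleAPICallError if the
+#   operation finished in error.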
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_update_auto_migration_config(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.UpdateAutoMigrationConfigRequest() + + # Make the request + operation = client.update_auto_migration_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_UpdateAutoMigrationConfig_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index 5c38333ab775..6cb3994e71f1 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -1454,6 +1454,167 @@ ], "title": "config_v1_generated_config_export_revision_statefile_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_auto_migration_config", + "method": { + "fullName": "google.cloud.config.v1.Config.GetAutoMigrationConfig", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "GetAutoMigrationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetAutoMigrationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.AutoMigrationConfig", + "shortName": "get_auto_migration_config" + }, + "description": "Sample for GetAutoMigrationConfig", + "file": "config_v1_generated_config_get_auto_migration_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetAutoMigrationConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_get_auto_migration_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.get_auto_migration_config", + "method": { + "fullName": "google.cloud.config.v1.Config.GetAutoMigrationConfig", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "GetAutoMigrationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetAutoMigrationConfigRequest" + }, + { + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.AutoMigrationConfig", + "shortName": "get_auto_migration_config" + }, + "description": "Sample for GetAutoMigrationConfig", + "file": "config_v1_generated_config_get_auto_migration_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetAutoMigrationConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_get_auto_migration_config_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4207,6 +4368,175 @@ ], "title": "config_v1_generated_config_unlock_deployment_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.update_auto_migration_config", + "method": { + "fullName": "google.cloud.config.v1.Config.UpdateAutoMigrationConfig", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UpdateAutoMigrationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.UpdateAutoMigrationConfigRequest" + }, + { + "name": "auto_migration_config", + "type": "google.cloud.config_v1.types.AutoMigrationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_auto_migration_config" + }, + "description": "Sample for UpdateAutoMigrationConfig", + "file": "config_v1_generated_config_update_auto_migration_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UpdateAutoMigrationConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_update_auto_migration_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.update_auto_migration_config", + "method": { + "fullName": "google.cloud.config.v1.Config.UpdateAutoMigrationConfig", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UpdateAutoMigrationConfig" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.UpdateAutoMigrationConfigRequest" + }, + { + "name": "auto_migration_config", + "type": "google.cloud.config_v1.types.AutoMigrationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_auto_migration_config" + }, + "description": "Sample for UpdateAutoMigrationConfig", + "file": "config_v1_generated_config_update_auto_migration_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UpdateAutoMigrationConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_update_auto_migration_config_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index 648afeb0d9e2..2bc00850fb8c 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -14,6 +14,7 @@ # limitations under the License. # import os +import re # try/except added for compatibility with python < 3.8 try: @@ -11723,6 +11724,705 @@ async def test_get_resource_drift_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + config.GetAutoMigrationConfigRequest, + dict, + ], +) +def test_get_auto_migration_config(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.AutoMigrationConfig( + name="name_value", + auto_migration_enabled=True, + ) + response = client.get_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.GetAutoMigrationConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, config.AutoMigrationConfig) + assert response.name == "name_value" + assert response.auto_migration_enabled is True + + +def test_get_auto_migration_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.GetAutoMigrationConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_auto_migration_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetAutoMigrationConfigRequest( + name="name_value", + ) + + +def test_get_auto_migration_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_auto_migration_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_auto_migration_config + ] = mock_rpc + request = {} + client.get_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_auto_migration_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_auto_migration_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_auto_migration_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_auto_migration_config + ] = mock_rpc + + request = {} + await client.get_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_auto_migration_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_auto_migration_config_async( + transport: str = "grpc_asyncio", request_type=config.GetAutoMigrationConfigRequest +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.AutoMigrationConfig( + name="name_value", + auto_migration_enabled=True, + ) + ) + response = await client.get_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.GetAutoMigrationConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, config.AutoMigrationConfig) + assert response.name == "name_value" + assert response.auto_migration_enabled is True + + +@pytest.mark.asyncio +async def test_get_auto_migration_config_async_from_dict(): + await test_get_auto_migration_config_async(request_type=dict) + + +def test_get_auto_migration_config_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetAutoMigrationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + call.return_value = config.AutoMigrationConfig() + client.get_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_auto_migration_config_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetAutoMigrationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.AutoMigrationConfig() + ) + await client.get_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
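+    # (The x-goog-request-params routing header is derived from request.name,
+    # which the assertion below verifies.)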
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_auto_migration_config_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.AutoMigrationConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_auto_migration_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_auto_migration_config_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_auto_migration_config( + config.GetAutoMigrationConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_auto_migration_config_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.AutoMigrationConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.AutoMigrationConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_auto_migration_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_auto_migration_config_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_auto_migration_config( + config.GetAutoMigrationConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UpdateAutoMigrationConfigRequest, + dict, + ], +) +def test_update_auto_migration_config(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
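+        # The transport stub returns a raw operations_pb2.Operation; the client
+        # is expected to wrap it in an api_core future, which the final
+        # assertion below verifies.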
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.UpdateAutoMigrationConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_auto_migration_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.UpdateAutoMigrationConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_auto_migration_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.UpdateAutoMigrationConfigRequest() + + +def test_update_auto_migration_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_auto_migration_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_auto_migration_config + ] = mock_rpc + request = {} + client.update_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_auto_migration_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_auto_migration_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_auto_migration_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_auto_migration_config + ] = mock_rpc + + request = {} + await client.update_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_auto_migration_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_auto_migration_config_async( + transport: str = "grpc_asyncio", + request_type=config.UpdateAutoMigrationConfigRequest, +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.UpdateAutoMigrationConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_auto_migration_config_async_from_dict(): + await test_update_auto_migration_config_async(request_type=dict) + + +def test_update_auto_migration_config_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UpdateAutoMigrationConfigRequest() + + request.auto_migration_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
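+    # (grpc_helpers_async.FakeUnaryUnaryCall wraps the return value so the
+    # mocked stub can be awaited like a real async unary-unary call.)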
+ with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "auto_migration_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_auto_migration_config_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UpdateAutoMigrationConfigRequest() + + request.auto_migration_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "auto_migration_config.name=name_value", + ) in kw["metadata"] + + +def test_update_auto_migration_config_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_auto_migration_config( + auto_migration_config=config.AutoMigrationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].auto_migration_config + mock_val = config.AutoMigrationConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_auto_migration_config_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_auto_migration_config( + config.UpdateAutoMigrationConfigRequest(), + auto_migration_config=config.AutoMigrationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_auto_migration_config_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
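+    # (with flattened arguments the client assembles the request message
+    # itself, so the mock only sees the finished request object.)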
+    with mock.patch.object(
+        type(client.transport.update_auto_migration_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_auto_migration_config(
+            auto_migration_config=config.AutoMigrationConfig(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].auto_migration_config
+        mock_val = config.AutoMigrationConfig(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_auto_migration_config_flattened_error_async():
+    client = ConfigAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_auto_migration_config(
+            config.UpdateAutoMigrationConfigRequest(),
+            auto_migration_config=config.AutoMigrationConfig(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
 def test_list_deployments_rest_use_cached_wrapped_rpc():
     # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
@@ -11737,7 +12437,671 @@ def test_list_deployments_rest_use_cached_wrapped_rpc():
         wrapper_fn.reset_mock()
 
         # Ensure method has been cached
-        assert client._transport.list_deployments in client._transport._wrapped_methods
+        assert client._transport.list_deployments in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.list_deployments
+        ] = mock_rpc
+
+        request = {}
+        client.list_deployments(request)
+
+        # Establish that the underlying gRPC stub method was called.
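+        # (i.e. the call was routed through the patched _wrapped_methods
+        # entry rather than a freshly wrapped callable.)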
+ assert mock_rpc.call_count == 1 + + client.list_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_deployments_rest_required_fields( + request_type=config.ListDeploymentsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
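+            # (transcode() is what normally splits a request between the URI,
+            # query string, and body according to the http rule; faking its
+            # result keeps this test independent of the real URI template.)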
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_deployments_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployments( + config.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Deployment) for i in results) + + pages = list(client.list_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_deployment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc + + request = {} + client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
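+        # (get_deployment is a plain unary method, so unlike the operation
+        # methods no extra wrapping is expected on the repeat call below.)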
+ assert mock_rpc.call_count == 1 + + client.get_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
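+        # (an empty Deployment is sufficient here; the assertions below only
+        # check the request path, not the decoded payload.)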
+ return_value = config.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deployment( + config.GetDeploymentRequest(), + name="name_value", + ) + + +def test_create_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_deployment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_deployment + ] = mock_rpc + + request = {} + client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
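+        # (create_deployment starts a long-running operation, hence the
+        # cached operations-client wrapper noted next.)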
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_deployment_rest_required_fields( + request_type=config.CreateDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "deploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == request_init["deployment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["deploymentId"] = "deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "deployment_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == "deployment_id_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
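+            # (unlike the GET methods above, create is a POST, so the faked
+            # transcode result below also carries a request body.)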
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_deployment(request) + + expected_params = [ + ( + "deploymentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deploymentId", + "requestId", + ) + ) + & set( + ( + "parent", + "deploymentId", + "deployment", + ) + ) + ) + + +def test_create_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
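+    # (the ValueError is raised client-side, before any HTTP request is made.)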
+ with pytest.raises(ValueError): + client.create_deployment( + config.CreateDeploymentRequest(), + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + +def test_update_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -11745,29 +13109,32 @@ def test_list_deployments_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_deployments + client._transport.update_deployment ] = mock_rpc request = {} - client.list_deployments(request) + client.update_deployment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_deployments(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_deployments_rest_required_fields( - request_type=config.ListDeploymentsRequest, +def test_update_deployment_rest_required_fields( + request_type=config.UpdateDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11778,30 +13145,24 @@ def test_list_deployments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) + ).update_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) + ).update_deployment._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11810,7 +13171,7 @@ def test_list_deployments_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = config.ListDeploymentsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11822,49 +13183,45 @@ def test_list_deployments_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_deployments(request) + response = client.update_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_deployments_rest_unset_required_fields(): +def test_update_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_deployments._get_unset_required_fields({}) + unset_fields = transport.update_deployment._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "requestId", + "updateMask", ) ) - & set(("parent",)) + & set(("deployment",)) ) -def test_list_deployments_rest_flattened(): +def test_update_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11873,117 +13230,68 @@ def test_list_deployments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_deployments(**mock_args) + client.update_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" % client.transport._host, args[1], - ) - - -def test_list_deployments_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deployments( - config.ListDeploymentsRequest(), - parent="parent_value", - ) - - -def test_list_deployments_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), - config.Deployment(), - ], - next_page_token="abc", - ), - config.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - ], - next_page_token="ghi", - ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} + ) - pager = client.list_deployments(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Deployment) for i in results) +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pages = list(client.list_deployments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Attempting to call a method with both a request object and flattened + # fields is an error. 
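+    # (the request-object and flattened-argument calling styles are mutually
+    # exclusive by design.)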
+ with pytest.raises(ValueError): + client.update_deployment( + config.UpdateDeploymentRequest(), + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) -def test_get_deployment_rest_use_cached_wrapped_rpc(): +def test_delete_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11997,29 +13305,37 @@ def test_get_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_deployment in client._transport._wrapped_methods + assert client._transport.delete_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_deployment + ] = mock_rpc request = {} - client.get_deployment(request) + client.delete_deployment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_deployment(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): +def test_delete_deployment_rest_required_fields( + request_type=config.DeleteDeploymentRequest, +): transport_class = transports.ConfigRestTransport request_init = {} @@ -12034,7 +13350,7 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) + ).delete_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12043,7 +13359,15 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) + ).delete_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "delete_policy", + "force", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12057,7 +13381,7 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Deployment() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12069,39 +13393,45 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_deployment(request) + response = client.delete_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_deployment_rest_unset_required_fields(): +def test_delete_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deletePolicy", + "force", + "requestId", + ) + ) + & set(("name",)) + ) -def test_get_deployment_rest_flattened(): +def test_delete_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12110,7 +13440,7 @@ def test_get_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Deployment() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -12126,14 +13456,12 @@ def test_get_deployment_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_deployment(**mock_args) + client.delete_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -12146,7 +13474,7 @@ def test_get_deployment_rest_flattened(): ) -def test_get_deployment_rest_flattened_error(transport: str = "rest"): +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12155,13 +13483,13 @@ def test_get_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_deployment( - config.GetDeploymentRequest(), + client.delete_deployment( + config.DeleteDeploymentRequest(), name="name_value", ) -def test_create_deployment_rest_use_cached_wrapped_rpc(): +def test_list_revisions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12175,42 +13503,33 @@ def test_create_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_deployment in client._transport._wrapped_methods + assert client._transport.list_revisions in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_deployment - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_revisions] = mock_rpc request = {} - client.create_deployment(request) + client.list_revisions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_deployment(request) + client.list_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_deployment_rest_required_fields( - request_type=config.CreateDeploymentRequest, -): +def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): transport_class = transports.ConfigRestTransport request_init = {} request_init["parent"] = "" - request_init["deployment_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12218,28 +13537,26 @@ def test_create_deployment_rest_required_fields( ) # verify fields with default values are dropped - assert "deploymentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == request_init["deployment_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["deploymentId"] = "deployment_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "deployment_id", - "request_id", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -12247,8 +13564,6 @@ def test_create_deployment_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == "deployment_id_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12257,7 +13572,7 @@ def test_create_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListRevisionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12269,57 +13584,49 @@ def test_create_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_deployment(request) + response = client.list_revisions(request) - expected_params = [ - ( - "deploymentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_deployment_rest_unset_required_fields(): +def test_list_revisions_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_deployment._get_unset_required_fields({}) + unset_fields = transport.list_revisions._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "deploymentId", - "requestId", - ) - ) - & set( - ( - "parent", - "deploymentId", - "deployment", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_create_deployment_rest_flattened(): +def test_list_revisions_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12328,66 +13635,121 @@ def test_create_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListRevisionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_deployment(**mock_args) + client.list_revisions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" + "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" % client.transport._host, args[1], ) -def test_create_deployment_rest_flattened_error(transport: str = "rest"): +def test_list_revisions_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_revisions( + config.ListRevisionsRequest(), + parent="parent_value", + ) + + +def test_list_revisions_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deployment( - config.CreateDeploymentRequest(), - parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), + ], + next_page_token="abc", + ), + config.ListRevisionsResponse( + revisions=[], + next_page_token="def", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + ], + next_page_token="ghi", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } + + pager = client.list_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Revision) for i in results) + + pages = list(client.list_revisions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_update_deployment_rest_use_cached_wrapped_rpc(): +def test_get_revision_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12401,40 +13763,33 @@ def test_update_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_deployment in client._transport._wrapped_methods + assert client._transport.get_revision in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_deployment - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_revision] = mock_rpc request = {} - client.update_deployment(request) + client.get_revision(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_deployment(request) + client.get_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_deployment_rest_required_fields( - request_type=config.UpdateDeploymentRequest, -): +def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): transport_class = transports.ConfigRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12445,24 +13800,21 @@ def test_update_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12471,7 +13823,7 @@ def test_update_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Revision() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12483,45 +13835,39 @@ def test_update_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_deployment(request) + response = client.get_revision(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_deployment_rest_unset_required_fields(): +def test_get_revision_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set(("deployment",)) - ) + unset_fields = transport.get_revision._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_update_deployment_rest_flattened(): +def test_get_revision_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12530,48 +13876,43 @@ def test_update_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Revision() # get arguments that satisfy an http rule for this method sample_request = { - "deployment": { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } # get truthy value for each flattened field mock_args = dict( - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_deployment(**mock_args) + client.get_revision(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" % client.transport._host, args[1], ) -def test_update_deployment_rest_flattened_error(transport: str = "rest"): +def test_get_revision_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12580,18 +13921,13 @@ def test_update_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_deployment( - config.UpdateDeploymentRequest(), - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_revision( + config.GetRevisionRequest(), + name="name_value", ) -def test_delete_deployment_rest_use_cached_wrapped_rpc(): +def test_get_resource_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12605,37 +13941,29 @@ def test_delete_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_deployment in client._transport._wrapped_methods + assert client._transport.get_resource in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_deployment - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_resource] = mock_rpc request = {} - client.delete_deployment(request) + client.get_resource(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_deployment(request) + client.get_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_deployment_rest_required_fields( - request_type=config.DeleteDeploymentRequest, -): +def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -12650,7 +13978,7 @@ def test_delete_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12659,15 +13987,7 @@ def test_delete_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "delete_policy", - "force", - "request_id", - ) - ) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12681,7 +14001,7 @@ def test_delete_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12693,45 +14013,39 @@ def test_delete_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_deployment(request) + response = client.get_resource(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_deployment_rest_unset_required_fields(): +def test_get_resource_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "deletePolicy", - "force", - "requestId", - ) - ) - & set(("name",)) - ) + unset_fields = transport.get_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_deployment_rest_flattened(): +def test_get_resource_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12740,11 +14054,11 @@ def test_delete_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" } # get truthy value for each flattened field @@ -12756,25 +14070,27 @@ def test_delete_deployment_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_deployment(**mock_args) + client.get_resource(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" % client.transport._host, args[1], ) -def test_delete_deployment_rest_flattened_error(transport: str = "rest"): +def test_get_resource_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12783,13 +14099,13 @@ def test_delete_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_deployment( - config.DeleteDeploymentRequest(), + client.get_resource( + config.GetResourceRequest(), name="name_value", ) -def test_list_revisions_rest_use_cached_wrapped_rpc(): +def test_list_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12803,29 +14119,29 @@ def test_list_revisions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_revisions in client._transport._wrapped_methods + assert client._transport.list_resources in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_revisions] = mock_rpc + client._transport._wrapped_methods[client._transport.list_resources] = mock_rpc request = {} - client.list_revisions(request) + client.list_resources(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_revisions(request) + client.list_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): +def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -12840,7 +14156,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12849,7 +14165,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -12872,7 +14188,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse() + return_value = config.ListResourcesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12893,26 +14209,26 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_revisions(request) + response = client.list_resources(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_revisions_rest_unset_required_fields(): +def test_list_resources_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_revisions._get_unset_required_fields({}) + unset_fields = transport.list_resources._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -12926,7 +14242,7 @@ def test_list_revisions_rest_unset_required_fields(): ) -def test_list_revisions_rest_flattened(): +def test_list_resources_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12935,11 +14251,11 @@ def test_list_revisions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
-        return_value = config.ListRevisionsResponse()
+        return_value = config.ListResourcesResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {
-            "parent": "projects/sample1/locations/sample2/deployments/sample3"
+            "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4"
        }

        # get truthy value for each flattened field
@@ -12952,26 +14268,26 @@
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
-        return_value = config.ListRevisionsResponse.pb(return_value)
+        return_value = config.ListResourcesResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}

-        client.list_revisions(**mock_args)
+        client.list_resources(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
-            "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions"
+            "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources"
            % client.transport._host,
            args[1],
        )


-def test_list_revisions_rest_flattened_error(transport: str = "rest"):
+def test_list_resources_rest_flattened_error(transport: str = "rest"):
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
@@ -12980,13 +14296,13 @@
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
-        client.list_revisions(
-            config.ListRevisionsRequest(),
+        client.list_resources(
+            config.ListResourcesRequest(),
            parent="parent_value",
        )


-def test_list_revisions_rest_pager(transport: str = "rest"):
+def test_list_resources_rest_pager(transport: str = "rest"):
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
-            config.ListRevisionsResponse(
-                revisions=[
-                    config.Revision(),
-                    config.Revision(),
-                    config.Revision(),
+            config.ListResourcesResponse(
+                resources=[
+                    config.Resource(),
+                    config.Resource(),
+                    config.Resource(),
                ],
                next_page_token="abc",
            ),
-            config.ListRevisionsResponse(
-                revisions=[],
+            config.ListResourcesResponse(
+                resources=[],
                next_page_token="def",
            ),
-            config.ListRevisionsResponse(
-                revisions=[
-                    config.Revision(),
+            config.ListResourcesResponse(
+                resources=[
+                    config.Resource(),
                ],
                next_page_token="ghi",
            ),
-            config.ListRevisionsResponse(
-                revisions=[
-                    config.Revision(),
-                    config.Revision(),
+            config.ListResourcesResponse(
+                resources=[
+                    config.Resource(),
+                    config.Resource(),
                ],
            ),
        )
@@ -13027,7 +14343,7 @@
        response = response + response

        # Wrap the values into proper Response objs
-        response = tuple(config.ListRevisionsResponse.to_json(x) for x in response)
+        response = tuple(config.ListResourcesResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
@@ -13035,21 +14351,21 @@
        req.side_effect = return_values

        sample_request = {
-            "parent": "projects/sample1/locations/sample2/deployments/sample3"
+            "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4"
        }

-        pager = client.list_revisions(request=sample_request)
+        pager = client.list_resources(request=sample_request)

        results = list(pager)
        assert len(results) == 6
-        assert all(isinstance(i, config.Revision) for i in results)
+        assert all(isinstance(i, config.Resource) for i in results)

-        pages = list(client.list_revisions(request=sample_request).pages)
+        pages = list(client.list_resources(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


-def test_get_revision_rest_use_cached_wrapped_rpc():
+def test_export_deployment_statefile_rest_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
@@ -13063,33 +14379,40 @@
        client = ConfigClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
-        assert client._transport.get_revision in client._transport._wrapped_methods
+        assert (
+            client._transport.export_deployment_statefile
+            in client._transport._wrapped_methods
+        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
-        client._transport._wrapped_methods[client._transport.get_revision] = mock_rpc
+        client._transport._wrapped_methods[
+            client._transport.export_deployment_statefile
+        ] = mock_rpc

        request = {}
-        client.get_revision(request)
+        client.export_deployment_statefile(request)

        # Establish that the underlying gRPC stub method was called.
assert mock_rpc.call_count == 1 - client.get_revision(request) + client.export_deployment_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): +def test_export_deployment_statefile_rest_required_fields( + request_type=config.ExportDeploymentStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13100,21 +14423,21 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13123,7 +14446,7 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Revision() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13135,99 +14458,40 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_revision(request) + response = client.export_deployment_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_revision_rest_unset_required_fields(): +def test_export_deployment_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_revision._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_revision_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.Revision() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_revision(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_revision_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_revision( - config.GetRevisionRequest(), - name="name_value", - ) + unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_get_resource_rest_use_cached_wrapped_rpc(): +def test_export_revision_statefile_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13241,33 +14505,40 @@ def test_get_resource_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_resource in client._transport._wrapped_methods + assert ( + client._transport.export_revision_statefile + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_resource] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_revision_statefile + ] = mock_rpc request = {} - client.get_resource(request) + client.export_revision_statefile(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource(request) + client.export_revision_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): +def test_export_revision_statefile_rest_required_fields( + request_type=config.ExportRevisionStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13278,21 +14549,21 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) + ).export_revision_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) + ).export_revision_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13301,7 +14572,7 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Resource() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13313,99 +14584,40 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource(request) + response = client.export_revision_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_resource_rest_unset_required_fields(): +def test_export_revision_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_resource._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_resource_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.Resource() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_resource(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" - % client.transport._host, - args[1], - ) - - -def test_get_resource_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_resource( - config.GetResourceRequest(), - name="name_value", - ) + unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_list_resources_rest_use_cached_wrapped_rpc(): +def test_import_statefile_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13419,33 +14631,38 @@ def test_list_resources_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_resources in client._transport._wrapped_methods + assert client._transport.import_statefile in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_resources] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_statefile + ] = mock_rpc request = {} - client.list_resources(request) + client.import_statefile(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_resources(request) + client.import_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): +def test_import_statefile_rest_required_fields( + request_type=config.ImportStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} request_init["parent"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13456,30 +14673,24 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resources._get_unset_required_fields(jsonified_request) + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_resources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + credentials=ga_credentials.AnonymousCredentials() + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13488,7 +14699,7 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = config.ListResourcesResponse() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13500,49 +14711,48 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resources(request) + response = client.import_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_resources_rest_unset_required_fields(): +def test_import_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_resources._get_unset_required_fields({}) + unset_fields = transport.import_statefile._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "parent", + "lockId", ) ) - & set(("parent",)) ) -def test_list_resources_rest_flattened(): +def test_import_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13551,16 +14761,17 @@ def test_list_resources_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse() + return_value = config.Statefile() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + "parent": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( parent="parent_value", + lock_id=725, ) mock_args.update(sample_request) @@ -13568,26 +14779,26 @@ def test_list_resources_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resources(**mock_args) + client.import_statefile(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
-            "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources"
+            "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState"
            % client.transport._host,
            args[1],
        )


-def test_list_resources_rest_flattened_error(transport: str = "rest"):
+def test_import_statefile_rest_flattened_error(transport: str = "rest"):
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
@@ -13596,76 +14807,14 @@
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
-        client.list_resources(
-            config.ListResourcesRequest(),
+        client.import_statefile(
+            config.ImportStatefileRequest(),
            parent="parent_value",
+            lock_id=725,
        )


-def test_list_resources_rest_pager(transport: str = "rest"):
-    client = ConfigClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, "request") as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
-        # with mock.patch.object(path_template, 'transcode') as transcode:
-        # Set the response as a series of pages
-        response = (
-            config.ListResourcesResponse(
-                resources=[
-                    config.Resource(),
-                    config.Resource(),
-                    config.Resource(),
-                ],
-                next_page_token="abc",
-            ),
-            config.ListResourcesResponse(
-                resources=[],
-                next_page_token="def",
-            ),
-            config.ListResourcesResponse(
-                resources=[
-                    config.Resource(),
-                ],
-                next_page_token="ghi",
-            ),
-            config.ListResourcesResponse(
-                resources=[
-                    config.Resource(),
-                    config.Resource(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(config.ListResourcesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode("UTF-8")
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {
-            "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4"
-        }
-
-        pager = client.list_resources(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, config.Resource) for i in results)
-
-        pages = list(client.list_resources(request=sample_request).pages)
-        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_export_deployment_statefile_rest_use_cached_wrapped_rpc():
+def test_delete_statefile_rest_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
@@ -13679,10 +14828,7 @@
        wrapper_fn.reset_mock()

        # Ensure method has been cached
-        assert (
-            client._transport.export_deployment_statefile
-            in client._transport._wrapped_methods
-        )
+        assert client._transport.delete_statefile in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
@@ -13690,29 +14836,30 @@
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
-            client._transport.export_deployment_statefile
+            client._transport.delete_statefile
        ] = mock_rpc

        request = {}
-        client.export_deployment_statefile(request)
+        client.delete_statefile(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

-        client.export_deployment_statefile(request)
+        client.delete_statefile(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


-def test_export_deployment_statefile_rest_required_fields(
-    request_type=config.ExportDeploymentStatefileRequest,
+def test_delete_statefile_rest_required_fields(
+    request_type=config.DeleteStatefileRequest,
 ):
    transport_class = transports.ConfigRestTransport

    request_init = {}
-    request_init["parent"] = ""
+    request_init["name"] = ""
+    request_init["lock_id"] = 0
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
@@ -13723,21 +14870,24 @@
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
-    ).export_deployment_statefile._get_unset_required_fields(jsonified_request)
+    ).delete_statefile._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

-    jsonified_request["parent"] = "parent_value"
+    jsonified_request["name"] = "name_value"
+    jsonified_request["lockId"] = 725

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
-    ).export_deployment_statefile._get_unset_required_fields(jsonified_request)
+    ).delete_statefile._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == "parent_value"
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+    assert "lockId" in jsonified_request
+    assert jsonified_request["lockId"] == 725

    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(),
@@ -13746,7 +14896,7 @@
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
-    return_value = config.Statefile()
+    return_value = None
    # Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13766,158 +14916,95 @@ def test_export_deployment_statefile_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_deployment_statefile(request) + response = client.delete_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_deployment_statefile_rest_unset_required_fields(): +def test_delete_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_export_revision_statefile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.export_revision_statefile - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + unset_fields = transport.delete_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) ) - client._transport._wrapped_methods[ - client._transport.export_revision_statefile - ] = mock_rpc - - request = {} - client.export_revision_statefile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.export_revision_statefile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_revision_statefile_rest_required_fields( - request_type=config.ExportRevisionStatefileRequest, -): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +def test_delete_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = config.Statefile() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - response_value = Response() - response_value.status_code = 200 + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_revision_statefile(request) + client.delete_statefile(**mock_args) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" + % client.transport._host, + args[1], + ) -def test_export_revision_statefile_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_delete_statefile_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_statefile( + config.DeleteStatefileRequest(), + name="name_value", + ) -def test_import_statefile_rest_use_cached_wrapped_rpc(): +def test_lock_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13931,38 +15018,39 @@ def test_import_statefile_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_statefile in client._transport._wrapped_methods + assert client._transport.lock_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_statefile - ] = mock_rpc + client._transport._wrapped_methods[client._transport.lock_deployment] = mock_rpc request = {} - client.import_statefile(request) + client.lock_deployment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.import_statefile(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.lock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_statefile_rest_required_fields( - request_type=config.ImportStatefileRequest, +def test_lock_deployment_rest_required_fields( + request_type=config.LockDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" - request_init["lock_id"] = 0 + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13973,24 +15061,21 @@ def test_import_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["lockId"] = 725 + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13999,7 +15084,7 @@ def test_import_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14019,40 +15104,29 @@ def test_import_statefile_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.import_statefile(request) + response = client.lock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_statefile_rest_unset_required_fields(): +def test_lock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_statefile._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "lockId", - ) - ) - ) + unset_fields = transport.lock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_import_statefile_rest_flattened(): +def test_lock_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14061,44 +15135,41 @@ def test_import_statefile_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - lock_id=725, + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.import_statefile(**mock_args) + client.lock_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" + "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" % client.transport._host, args[1], ) -def test_import_statefile_rest_flattened_error(transport: str = "rest"): +def test_lock_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14107,14 +15178,13 @@ def test_import_statefile_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.import_statefile( - config.ImportStatefileRequest(), - parent="parent_value", - lock_id=725, + client.lock_deployment( + config.LockDeploymentRequest(), + name="name_value", ) -def test_delete_statefile_rest_use_cached_wrapped_rpc(): +def test_unlock_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14128,7 +15198,7 @@ def test_delete_statefile_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_statefile in client._transport._wrapped_methods + assert client._transport.unlock_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -14136,24 +15206,28 @@ def test_delete_statefile_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_statefile + client._transport.unlock_deployment ] = mock_rpc request = {} - client.delete_statefile(request) + client.unlock_deployment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_statefile(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.unlock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_statefile_rest_required_fields( - request_type=config.DeleteStatefileRequest, +def test_unlock_deployment_rest_required_fields( + request_type=config.UnlockDeploymentRequest, ): transport_class = transports.ConfigRestTransport @@ -14170,7 +15244,7 @@ def test_delete_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_statefile._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14180,7 +15254,7 @@ def test_delete_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_statefile._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14196,7 +15270,7 @@ def test_delete_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14216,25 +15290,25 @@ def test_delete_statefile_rest_required_fields( response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_statefile(request) + response = client.unlock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_statefile_rest_unset_required_fields(): +def test_unlock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_statefile._get_unset_required_fields({}) + unset_fields = transport.unlock_deployment._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( @@ -14246,7 +15320,7 @@ def test_delete_statefile_rest_unset_required_fields(): ) -def test_delete_statefile_rest_flattened(): +def test_unlock_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14255,7 +15329,7 @@ def test_delete_statefile_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -14265,31 +15339,32 @@ def test_delete_statefile_rest_flattened(): # get truthy value for each flattened field mock_args = dict( name="name_value", + lock_id=725, ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_statefile(**mock_args) + client.unlock_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" + "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" % client.transport._host, args[1], ) -def test_delete_statefile_rest_flattened_error(transport: str = "rest"): +def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14298,13 +15373,14 @@ def test_delete_statefile_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_statefile( - config.DeleteStatefileRequest(), + client.unlock_deployment( + config.UnlockDeploymentRequest(), name="name_value", + lock_id=725, ) -def test_lock_deployment_rest_use_cached_wrapped_rpc(): +def test_export_lock_info_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14318,34 +15394,32 @@ def test_lock_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.lock_deployment in client._transport._wrapped_methods + assert client._transport.export_lock_info in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.lock_deployment] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_lock_info + ] = mock_rpc request = {} - client.lock_deployment(request) + client.export_lock_info(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.lock_deployment(request) + client.export_lock_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_lock_deployment_rest_required_fields( - request_type=config.LockDeploymentRequest, +def test_export_lock_info_rest_required_fields( + request_type=config.ExportLockInfoRequest, ): transport_class = transports.ConfigRestTransport @@ -14361,7 +15435,7 @@ def test_lock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14370,7 +15444,7 @@ def test_lock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14384,7 +15458,7 @@ def test_lock_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.LockInfo() # Mock the http request call within the method and fake a response. 
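    # export_lock_info transcodes to HTTP GET, so unlike the POST methods
    # above the faked transcode result carries query params only and no
    # "body" entry.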
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14396,37 +15470,39 @@ def test_lock_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lock_deployment(request) + response = client.export_lock_info(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_lock_deployment_rest_unset_required_fields(): +def test_export_lock_info_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.lock_deployment._get_unset_required_fields({}) + unset_fields = transport.export_lock_info._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_lock_deployment_rest_flattened(): +def test_export_lock_info_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14435,7 +15511,7 @@ def test_lock_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.LockInfo() # get arguments that satisfy an http rule for this method sample_request = { @@ -14451,25 +15527,27 @@ def test_lock_deployment_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.lock_deployment(**mock_args) + client.export_lock_info(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" + "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" % client.transport._host, args[1], ) -def test_lock_deployment_rest_flattened_error(transport: str = "rest"): +def test_export_lock_info_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14478,13 +15556,13 @@ def test_lock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.lock_deployment( - config.LockDeploymentRequest(), + client.export_lock_info( + config.ExportLockInfoRequest(), name="name_value", ) -def test_unlock_deployment_rest_use_cached_wrapped_rpc(): +def test_create_preview_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14498,19 +15576,17 @@ def test_unlock_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.unlock_deployment in client._transport._wrapped_methods + assert client._transport.create_preview in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.unlock_deployment - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_preview] = mock_rpc request = {} - client.unlock_deployment(request) + client.create_preview(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14519,21 +15595,18 @@ def test_unlock_deployment_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.unlock_deployment(request) + client.create_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_unlock_deployment_rest_required_fields( - request_type=config.UnlockDeploymentRequest, -): +def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" - request_init["lock_id"] = 0 + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14544,24 +15617,28 @@ def test_unlock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - jsonified_request["lockId"] = 725 + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "preview_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14596,31 +15673,36 @@ def test_unlock_deployment_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.unlock_deployment(request) + response = client.create_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_unlock_deployment_rest_unset_required_fields(): +def test_create_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.unlock_deployment._get_unset_required_fields({}) + unset_fields = transport.create_preview._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "previewId", + "requestId", + ) + ) & set( ( - "name", - "lockId", + "parent", + "preview", ) ) ) -def test_unlock_deployment_rest_flattened(): +def test_create_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14632,14 +15714,16 @@ def test_unlock_deployment_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", - lock_id=725, + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), ) mock_args.update(sample_request) @@ -14651,20 +15735,19 @@ def test_unlock_deployment_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.unlock_deployment(**mock_args) + client.create_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, args[1], ) -def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): +def test_create_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14673,14 +15756,18 @@ def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.unlock_deployment( - config.UnlockDeploymentRequest(), - name="name_value", - lock_id=725, + client.create_preview( + config.CreatePreviewRequest(), + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), ) -def test_export_lock_info_rest_use_cached_wrapped_rpc(): +def test_get_preview_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14694,33 +15781,29 @@ def test_export_lock_info_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.export_lock_info in client._transport._wrapped_methods + assert client._transport.get_preview in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.export_lock_info - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_preview] = mock_rpc request = {} - client.export_lock_info(request) + client.get_preview(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.export_lock_info(request) + client.get_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_lock_info_rest_required_fields( - request_type=config.ExportLockInfoRequest, -): +def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -14735,7 +15818,7 @@ def test_export_lock_info_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).get_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14744,7 +15827,7 @@ def test_export_lock_info_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).get_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14758,7 +15841,7 @@ def test_export_lock_info_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.LockInfo() + return_value = config.Preview() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14779,30 +15862,30 @@ def test_export_lock_info_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) + return_value = config.Preview.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_lock_info(request) + response = client.get_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_lock_info_rest_unset_required_fields(): +def test_get_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_lock_info._get_unset_required_fields({}) + unset_fields = transport.get_preview._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_export_lock_info_rest_flattened(): +def test_get_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14811,12 +15894,10 @@ def test_export_lock_info_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.LockInfo() + return_value = config.Preview() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} # get truthy value for each flattened field mock_args = dict( @@ -14828,26 +15909,25 @@ def test_export_lock_info_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) + return_value = config.Preview.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_lock_info(**mock_args) + client.get_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, args[1], ) -def test_export_lock_info_rest_flattened_error(transport: str = "rest"): +def test_get_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14856,13 +15936,13 @@ def test_export_lock_info_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.export_lock_info( - config.ExportLockInfoRequest(), + client.get_preview( + config.GetPreviewRequest(), name="name_value", ) -def test_create_preview_rest_use_cached_wrapped_rpc(): +def test_list_previews_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14876,33 +15956,29 @@ def test_create_preview_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_preview in client._transport._wrapped_methods + assert client._transport.list_previews in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_preview] = mock_rpc + client._transport._wrapped_methods[client._transport.list_previews] = mock_rpc request = {} - client.create_preview(request) + client.list_previews(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_preview(request) + client.list_previews(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRequest): +def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -14917,7 +15993,7 @@ def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_preview._get_unset_required_fields(jsonified_request) + ).list_previews._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14926,12 +16002,14 @@ def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_preview._get_unset_required_fields(jsonified_request) + ).list_previews._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "preview_id", - "request_id", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -14947,7 +16025,7 @@ def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse() # Mock the http request call within the method and fake a response. 
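    # Besides the URL, the assertions below check that every call carries
    # the "$alt" system parameter, which requests JSON responses with enums
    # encoded as integers.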
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14959,50 +16037,49 @@ def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_preview(request) + response = client.list_previews(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_preview_rest_unset_required_fields(): +def test_list_previews_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_preview._get_unset_required_fields({}) + unset_fields = transport.list_previews._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "previewId", - "requestId", - ) - ) - & set( - ( - "parent", - "preview", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_create_preview_rest_flattened(): +def test_list_previews_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15011,7 +16088,7 @@ def test_create_preview_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -15019,23 +16096,20 @@ def test_create_preview_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - preview=config.Preview( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_preview(**mock_args) + client.list_previews(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
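The pager test in the next hunk feeds the client the same four canned pages
twice and checks both iteration styles: flattening items across pages, and
walking page objects with their tokens. A minimal sketch of the token-driven
loop that behavior implies (the fetch callable here is hypothetical, standing
in for one ListPreviews HTTP call):

    def iterate_previews(fetch):
        """Yield every Preview across pages until next_page_token is empty."""
        token = ""
        while True:
            page = fetch(page_token=token)  # one ListPreviews call
            yield from page.previews
            token = page.next_page_token
            if not token:  # the last page returns an empty token
                break

Against the four stubbed pages ("abc", "def", "ghi", then empty), a loop like
this yields the six Preview messages the test's assertions expect.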
@@ -15047,27 +16121,83 @@ def test_create_preview_rest_flattened(): ) -def test_create_preview_rest_flattened_error(transport: str = "rest"): +def test_list_previews_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", + ) + + +def test_list_previews_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_preview( - config.CreatePreviewRequest(), - parent="parent_value", - preview=config.Preview( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListPreviewsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_previews(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Preview) for i in results) + + pages = list(client.list_previews(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_get_preview_rest_use_cached_wrapped_rpc(): +def test_delete_preview_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15081,29 +16211,33 @@ def test_get_preview_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_preview in client._transport._wrapped_methods + assert client._transport.delete_preview in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_preview] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_preview] = mock_rpc request = {} - client.get_preview(request) + client.delete_preview(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_preview(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest): +def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -15118,7 +16252,7 @@ def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest) unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_preview._get_unset_required_fields(jsonified_request) + ).delete_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -15127,7 +16261,9 @@ def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest) unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_preview._get_unset_required_fields(jsonified_request) + ).delete_preview._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -15141,7 +16277,7 @@ def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Preview() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
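    # delete_preview starts a long-running operation: the stubbed response
    # is an Operation serialized to JSON, and the faked transcode result
    # below uses HTTP DELETE with no request body.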
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15153,39 +16289,36 @@ def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest) pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Preview.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_preview(request) + response = client.delete_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_preview_rest_unset_required_fields(): +def test_delete_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_preview._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_preview._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) -def test_get_preview_rest_flattened(): +def test_delete_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15194,7 +16327,7 @@ def test_get_preview_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Preview() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} @@ -15208,14 +16341,12 @@ def test_get_preview_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Preview.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_preview(**mock_args) + client.delete_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -15227,7 +16358,7 @@ def test_get_preview_rest_flattened(): ) -def test_get_preview_rest_flattened_error(transport: str = "rest"): +def test_delete_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15236,13 +16367,13 @@ def test_get_preview_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_preview( - config.GetPreviewRequest(), + client.delete_preview( + config.DeletePreviewRequest(), name="name_value", ) -def test_list_previews_rest_use_cached_wrapped_rpc(): +def test_export_preview_result_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15256,29 +16387,36 @@ def test_list_previews_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_previews in client._transport._wrapped_methods + assert ( + client._transport.export_preview_result + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_previews] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_preview_result + ] = mock_rpc request = {} - client.list_previews(request) + client.export_preview_result(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_previews(request) + client.export_preview_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequest): +def test_export_preview_result_rest_required_fields( + request_type=config.ExportPreviewResultRequest, +): transport_class = transports.ConfigRestTransport request_init = {} @@ -15293,7 +16431,7 @@ def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_previews._get_unset_required_fields(jsonified_request) + ).export_preview_result._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -15302,16 +16440,7 @@ def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_previews._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).export_preview_result._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -15325,7 +16454,7 @@ def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListPreviewsResponse() + return_value = config.ExportPreviewResultResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15337,167 +16466,40 @@ def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListPreviewsResponse.pb(return_value) + return_value = config.ExportPreviewResultResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_previews(request) + response = client.export_preview_result(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_previews_rest_unset_required_fields(): +def test_export_preview_result_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_previews._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -def test_list_previews_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.ListPreviewsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListPreviewsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_previews(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, - args[1], - ) - - -def test_list_previews_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_previews( - config.ListPreviewsRequest(), - parent="parent_value", - ) - - -def test_list_previews_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListPreviewsResponse( - previews=[ - config.Preview(), - config.Preview(), - config.Preview(), - ], - next_page_token="abc", - ), - config.ListPreviewsResponse( - previews=[], - next_page_token="def", - ), - config.ListPreviewsResponse( - previews=[ - config.Preview(), - ], - next_page_token="ghi", - ), - config.ListPreviewsResponse( - previews=[ - config.Preview(), - config.Preview(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListPreviewsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_previews(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Preview) for i in results) - - pages = list(client.list_previews(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + unset_fields = transport.export_preview_result._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_delete_preview_rest_use_cached_wrapped_rpc(): +def test_list_terraform_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15511,37 +16513,40 @@ def test_delete_preview_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_preview in client._transport._wrapped_methods + assert ( + client._transport.list_terraform_versions + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_preview] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_terraform_versions + ] = mock_rpc request = {} - client.delete_preview(request) + client.list_terraform_versions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_preview(request) + client.list_terraform_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRequest): +def test_list_terraform_versions_rest_required_fields( + request_type=config.ListTerraformVersionsRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15552,23 +16557,30 @@ def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_preview._get_unset_required_fields(jsonified_request) + ).list_terraform_versions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_preview._get_unset_required_fields(jsonified_request) + ).list_terraform_versions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15577,7 +16589,7 @@ def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListTerraformVersionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15589,36 +16601,49 @@ def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListTerraformVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_preview(request) + response = client.list_terraform_versions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_preview_rest_unset_required_fields(): +def test_list_terraform_versions_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_preview._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_terraform_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_delete_preview_rest_flattened(): +def test_list_terraform_versions_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15627,38 +16652,41 @@ def test_delete_preview_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListTerraformVersionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListTerraformVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_preview(**mock_args) + client.list_terraform_versions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/terraformVersions" + % client.transport._host, args[1], ) -def test_delete_preview_rest_flattened_error(transport: str = "rest"): +def test_list_terraform_versions_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15667,13 +16695,76 @@ def test_delete_preview_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_preview( - config.DeletePreviewRequest(), - name="name_value", + client.list_terraform_versions( + config.ListTerraformVersionsRequest(), + parent="parent_value", ) -def test_export_preview_result_rest_use_cached_wrapped_rpc(): +def test_list_terraform_versions_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListTerraformVersionsResponse( + terraform_versions=[ + config.TerraformVersion(), + config.TerraformVersion(), + config.TerraformVersion(), + ], + next_page_token="abc", + ), + config.ListTerraformVersionsResponse( + terraform_versions=[], + next_page_token="def", + ), + config.ListTerraformVersionsResponse( + terraform_versions=[ + config.TerraformVersion(), + ], + next_page_token="ghi", + ), + config.ListTerraformVersionsResponse( + terraform_versions=[ + config.TerraformVersion(), + config.TerraformVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + config.ListTerraformVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_terraform_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.TerraformVersion) for i in results) + + pages = list(client.list_terraform_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_terraform_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15688,7 +16779,7 @@ def test_export_preview_result_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.export_preview_result + client._transport.get_terraform_version in client._transport._wrapped_methods ) @@ -15698,29 +16789,29 @@ def test_export_preview_result_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation 
in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.export_preview_result + client._transport.get_terraform_version ] = mock_rpc request = {} - client.export_preview_result(request) + client.get_terraform_version(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.export_preview_result(request) + client.get_terraform_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_preview_result_rest_required_fields( - request_type=config.ExportPreviewResultRequest, +def test_get_terraform_version_rest_required_fields( + request_type=config.GetTerraformVersionRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15731,21 +16822,21 @@ def test_export_preview_result_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_preview_result._get_unset_required_fields(jsonified_request) + ).get_terraform_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_preview_result._get_unset_required_fields(jsonified_request) + ).get_terraform_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15754,7 +16845,7 @@ def test_export_preview_result_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ExportPreviewResultResponse() + return_value = config.TerraformVersion() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15766,40 +16857,99 @@ def test_export_preview_result_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ExportPreviewResultResponse.pb(return_value) + return_value = config.TerraformVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_preview_result(request) + response = client.get_terraform_version(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_preview_result_rest_unset_required_fields(): +def test_get_terraform_version_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_preview_result._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.get_terraform_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_terraform_versions_rest_use_cached_wrapped_rpc(): +def test_get_terraform_version_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.TerraformVersion() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/terraformVersions/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.TerraformVersion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_terraform_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/terraformVersions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_terraform_version_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_terraform_version( + config.GetTerraformVersionRequest(), + name="name_value", + ) + + +def test_list_resource_changes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15814,7 +16964,7 @@ def test_list_terraform_versions_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_terraform_versions + client._transport.list_resource_changes in client._transport._wrapped_methods ) @@ -15824,24 +16974,24 @@ def test_list_terraform_versions_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_terraform_versions + client._transport.list_resource_changes ] = mock_rpc request = {} - client.list_terraform_versions(request) + client.list_resource_changes(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_terraform_versions(request) + client.list_resource_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 - -def test_list_terraform_versions_rest_required_fields( - request_type=config.ListTerraformVersionsRequest, + +def test_list_resource_changes_rest_required_fields( + request_type=config.ListResourceChangesRequest, ): transport_class = transports.ConfigRestTransport @@ -15857,7 +17007,7 @@ def test_list_terraform_versions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_terraform_versions._get_unset_required_fields(jsonified_request) + ).list_resource_changes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -15866,7 +17016,7 @@ def test_list_terraform_versions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_terraform_versions._get_unset_required_fields(jsonified_request) + ).list_resource_changes._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -15889,7 +17039,7 @@ def test_list_terraform_versions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListTerraformVersionsResponse() + return_value = config.ListResourceChangesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15910,26 +17060,26 @@ def test_list_terraform_versions_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListTerraformVersionsResponse.pb(return_value) + return_value = config.ListResourceChangesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_terraform_versions(request) + response = client.list_resource_changes(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_terraform_versions_rest_unset_required_fields(): +def test_list_resource_changes_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_terraform_versions._get_unset_required_fields({}) + unset_fields = transport.list_resource_changes._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -15943,7 +17093,7 @@ def test_list_terraform_versions_rest_unset_required_fields(): ) -def test_list_terraform_versions_rest_flattened(): +def test_list_resource_changes_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15952,10 +17102,12 @@ def test_list_terraform_versions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListTerraformVersionsResponse() + return_value = config.ListResourceChangesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/previews/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -15967,26 +17119,26 @@ def test_list_terraform_versions_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListTerraformVersionsResponse.pb(return_value) + return_value = config.ListResourceChangesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_terraform_versions(**mock_args) + client.list_resource_changes(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/terraformVersions" + "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceChanges" % client.transport._host, args[1], ) -def test_list_terraform_versions_rest_flattened_error(transport: str = "rest"): +def test_list_resource_changes_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15995,13 +17147,13 @@ def test_list_terraform_versions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_terraform_versions( - config.ListTerraformVersionsRequest(), + client.list_resource_changes( + config.ListResourceChangesRequest(), parent="parent_value", ) -def test_list_terraform_versions_rest_pager(transport: str = "rest"): +def test_list_resource_changes_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16013,28 +17165,28 @@ def test_list_terraform_versions_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - config.ListTerraformVersionsResponse( - terraform_versions=[ - config.TerraformVersion(), - config.TerraformVersion(), - config.TerraformVersion(), + config.ListResourceChangesResponse( + resource_changes=[ + config.ResourceChange(), + config.ResourceChange(), + config.ResourceChange(), ], next_page_token="abc", ), - config.ListTerraformVersionsResponse( - terraform_versions=[], + config.ListResourceChangesResponse( + resource_changes=[], next_page_token="def", ), - config.ListTerraformVersionsResponse( - terraform_versions=[ - config.TerraformVersion(), + config.ListResourceChangesResponse( + resource_changes=[ + config.ResourceChange(), ], next_page_token="ghi", ), - config.ListTerraformVersionsResponse( - terraform_versions=[ - config.TerraformVersion(), - config.TerraformVersion(), + config.ListResourceChangesResponse( + resource_changes=[ + config.ResourceChange(), + config.ResourceChange(), ], ), ) @@ -16043,7 +17195,7 @@ def test_list_terraform_versions_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - config.ListTerraformVersionsResponse.to_json(x) for x in response + config.ListResourceChangesResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -16051,20 +17203,22 @@ def test_list_terraform_versions_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/previews/sample3" + } - pager = client.list_terraform_versions(request=sample_request) + pager = client.list_resource_changes(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, config.TerraformVersion) for i in results) + assert all(isinstance(i, config.ResourceChange) for i in results) - pages = list(client.list_terraform_versions(request=sample_request).pages) + pages = list(client.list_resource_changes(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert 
page_.raw_page.next_page_token == token -def test_get_terraform_version_rest_use_cached_wrapped_rpc(): +def test_get_resource_change_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16079,8 +17233,7 @@ def test_get_terraform_version_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_terraform_version - in client._transport._wrapped_methods + client._transport.get_resource_change in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16089,24 +17242,24 @@ def test_get_terraform_version_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_terraform_version + client._transport.get_resource_change ] = mock_rpc request = {} - client.get_terraform_version(request) + client.get_resource_change(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_terraform_version(request) + client.get_resource_change(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_terraform_version_rest_required_fields( - request_type=config.GetTerraformVersionRequest, +def test_get_resource_change_rest_required_fields( + request_type=config.GetResourceChangeRequest, ): transport_class = transports.ConfigRestTransport @@ -16122,7 +17275,7 @@ def test_get_terraform_version_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_terraform_version._get_unset_required_fields(jsonified_request) + ).get_resource_change._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16131,7 +17284,7 @@ def test_get_terraform_version_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_terraform_version._get_unset_required_fields(jsonified_request) + ).get_resource_change._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16145,7 +17298,7 @@ def test_get_terraform_version_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.TerraformVersion() + return_value = config.ResourceChange() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16166,30 +17319,30 @@ def test_get_terraform_version_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.TerraformVersion.pb(return_value) + return_value = config.ResourceChange.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_terraform_version(request) + response = client.get_resource_change(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_terraform_version_rest_unset_required_fields(): +def test_get_resource_change_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_terraform_version._get_unset_required_fields({}) + unset_fields = transport.get_resource_change._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_terraform_version_rest_flattened(): +def test_get_resource_change_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16198,11 +17351,11 @@ def test_get_terraform_version_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.TerraformVersion() + return_value = config.ResourceChange() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/terraformVersions/sample3" + "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" } # get truthy value for each flattened field @@ -16215,26 +17368,26 @@ def test_get_terraform_version_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.TerraformVersion.pb(return_value) + return_value = config.ResourceChange.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_terraform_version(**mock_args) + client.get_resource_change(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/terraformVersions/*}" + "%s/v1/{name=projects/*/locations/*/previews/*/resourceChanges/*}" % client.transport._host, args[1], ) -def test_get_terraform_version_rest_flattened_error(transport: str = "rest"): +def test_get_resource_change_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16243,13 +17396,13 @@ def test_get_terraform_version_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_terraform_version( - config.GetTerraformVersionRequest(), + client.get_resource_change( + config.GetResourceChangeRequest(), name="name_value", ) -def test_list_resource_changes_rest_use_cached_wrapped_rpc(): +def test_list_resource_drifts_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16264,8 +17417,7 @@ def test_list_resource_changes_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_resource_changes - in client._transport._wrapped_methods + client._transport.list_resource_drifts in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16274,24 +17426,24 @@ def test_list_resource_changes_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_resource_changes + client._transport.list_resource_drifts ] = mock_rpc request = {} - client.list_resource_changes(request) + client.list_resource_drifts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_resource_changes(request) + client.list_resource_drifts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_resource_changes_rest_required_fields( - request_type=config.ListResourceChangesRequest, +def test_list_resource_drifts_rest_required_fields( + request_type=config.ListResourceDriftsRequest, ): transport_class = transports.ConfigRestTransport @@ -16307,7 +17459,7 @@ def test_list_resource_changes_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_changes._get_unset_required_fields(jsonified_request) + ).list_resource_drifts._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16316,7 +17468,7 @@ def test_list_resource_changes_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_changes._get_unset_required_fields(jsonified_request) + ).list_resource_drifts._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -16339,7 +17491,7 @@ def test_list_resource_changes_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListResourceChangesResponse() + return_value = config.ListResourceDriftsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16360,26 +17512,26 @@ def test_list_resource_changes_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourceChangesResponse.pb(return_value) + return_value = config.ListResourceDriftsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resource_changes(request) + response = client.list_resource_drifts(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_resource_changes_rest_unset_required_fields(): +def test_list_resource_drifts_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_resource_changes._get_unset_required_fields({}) + unset_fields = transport.list_resource_drifts._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -16393,7 +17545,7 @@ def test_list_resource_changes_rest_unset_required_fields(): ) -def test_list_resource_changes_rest_flattened(): +def test_list_resource_drifts_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16402,7 +17554,7 @@ def test_list_resource_changes_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourceChangesResponse() + return_value = config.ListResourceDriftsResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -16419,26 +17571,26 @@ def test_list_resource_changes_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourceChangesResponse.pb(return_value) + return_value = config.ListResourceDriftsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resource_changes(**mock_args) + client.list_resource_drifts(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceChanges" + "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceDrifts" % client.transport._host, args[1], ) -def test_list_resource_changes_rest_flattened_error(transport: str = "rest"): +def test_list_resource_drifts_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16447,13 +17599,13 @@ def test_list_resource_changes_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_resource_changes( - config.ListResourceChangesRequest(), + client.list_resource_drifts( + config.ListResourceDriftsRequest(), parent="parent_value", ) -def test_list_resource_changes_rest_pager(transport: str = "rest"): +def test_list_resource_drifts_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16465,28 +17617,28 @@ def test_list_resource_changes_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - config.ListResourceChangesResponse( - resource_changes=[ - config.ResourceChange(), - config.ResourceChange(), - config.ResourceChange(), + config.ListResourceDriftsResponse( + resource_drifts=[ + config.ResourceDrift(), + config.ResourceDrift(), + config.ResourceDrift(), ], next_page_token="abc", ), - config.ListResourceChangesResponse( - resource_changes=[], + config.ListResourceDriftsResponse( + resource_drifts=[], next_page_token="def", ), - config.ListResourceChangesResponse( - resource_changes=[ - config.ResourceChange(), + config.ListResourceDriftsResponse( + resource_drifts=[ + config.ResourceDrift(), ], next_page_token="ghi", ), - config.ListResourceChangesResponse( - resource_changes=[ - config.ResourceChange(), - config.ResourceChange(), + config.ListResourceDriftsResponse( + resource_drifts=[ + config.ResourceDrift(), + config.ResourceDrift(), ], ), ) @@ -16494,9 +17646,7 @@ def test_list_resource_changes_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple( - config.ListResourceChangesResponse.to_json(x) for x in response - ) + response = tuple(config.ListResourceDriftsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -16507,18 +17657,18 @@ def test_list_resource_changes_rest_pager(transport: str = "rest"): "parent": "projects/sample1/locations/sample2/previews/sample3" } - pager = client.list_resource_changes(request=sample_request) + pager = client.list_resource_drifts(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, config.ResourceChange) for i in results) + assert all(isinstance(i, config.ResourceDrift) for i in results) - pages = list(client.list_resource_changes(request=sample_request).pages) + pages = list(client.list_resource_drifts(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_resource_change_rest_use_cached_wrapped_rpc(): +def test_get_resource_drift_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16533,7 +17683,7 @@ def test_get_resource_change_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_resource_change in client._transport._wrapped_methods + client._transport.get_resource_drift in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16542,24 +17692,24 @@ def test_get_resource_change_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_resource_change + client._transport.get_resource_drift ] = mock_rpc request = {} - client.get_resource_change(request) + client.get_resource_drift(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource_change(request) + client.get_resource_drift(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_resource_change_rest_required_fields( - request_type=config.GetResourceChangeRequest, +def test_get_resource_drift_rest_required_fields( + request_type=config.GetResourceDriftRequest, ): transport_class = transports.ConfigRestTransport @@ -16575,7 +17725,7 @@ def test_get_resource_change_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_change._get_unset_required_fields(jsonified_request) + ).get_resource_drift._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16584,7 +17734,7 @@ def test_get_resource_change_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_change._get_unset_required_fields(jsonified_request) + ).get_resource_drift._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16598,7 +17748,7 @@ def test_get_resource_change_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ResourceChange() + return_value = config.ResourceDrift() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16619,30 +17769,30 @@ def test_get_resource_change_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ResourceChange.pb(return_value) + return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_change(request) + response = client.get_resource_drift(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_resource_change_rest_unset_required_fields(): +def test_get_resource_drift_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_resource_change._get_unset_required_fields({}) + unset_fields = transport.get_resource_drift._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_resource_change_rest_flattened(): +def test_get_resource_drift_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16651,11 +17801,11 @@ def test_get_resource_change_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ResourceChange() + return_value = config.ResourceDrift() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" + "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" } # get truthy value for each flattened field @@ -16668,26 +17818,26 @@ def test_get_resource_change_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ResourceChange.pb(return_value) + return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_change(**mock_args) + client.get_resource_drift(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/previews/*/resourceChanges/*}" + "%s/v1/{name=projects/*/locations/*/previews/*/resourceDrifts/*}" % client.transport._host, args[1], ) -def test_get_resource_change_rest_flattened_error(transport: str = "rest"): +def test_get_resource_drift_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16695,14 +17845,14 @@ def test_get_resource_change_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. - with pytest.raises(ValueError): - client.get_resource_change( - config.GetResourceChangeRequest(), + with pytest.raises(ValueError): + client.get_resource_drift( + config.GetResourceDriftRequest(), name="name_value", ) -def test_list_resource_drifts_rest_use_cached_wrapped_rpc(): +def test_get_auto_migration_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16717,7 +17867,8 @@ def test_list_resource_drifts_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_resource_drifts in client._transport._wrapped_methods + client._transport.get_auto_migration_config + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16726,29 +17877,29 @@ def test_list_resource_drifts_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_resource_drifts + client._transport.get_auto_migration_config ] = mock_rpc request = {} - client.list_resource_drifts(request) + client.get_auto_migration_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_resource_drifts(request) + client.get_auto_migration_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_resource_drifts_rest_required_fields( - request_type=config.ListResourceDriftsRequest, +def test_get_auto_migration_config_rest_required_fields( + request_type=config.GetAutoMigrationConfigRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16759,30 +17910,21 @@ def test_list_resource_drifts_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_drifts._get_unset_required_fields(jsonified_request) + ).get_auto_migration_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_drifts._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_auto_migration_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16791,7 +17933,7 @@ def test_list_resource_drifts_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListResourceDriftsResponse() + return_value = config.AutoMigrationConfig() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16812,40 +17954,30 @@ def test_list_resource_drifts_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourceDriftsResponse.pb(return_value) + return_value = config.AutoMigrationConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resource_drifts(request) + response = client.get_auto_migration_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_resource_drifts_rest_unset_required_fields(): +def test_get_auto_migration_config_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_resource_drifts._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_auto_migration_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_resource_drifts_rest_flattened(): +def test_get_auto_migration_config_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16854,16 +17986,16 @@ def test_list_resource_drifts_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourceDriftsResponse() + return_value = config.AutoMigrationConfig() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/previews/sample3" + "name": "projects/sample1/locations/sample2/autoMigrationConfig" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -16871,26 +18003,26 @@ def test_list_resource_drifts_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourceDriftsResponse.pb(return_value) + return_value = config.AutoMigrationConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resource_drifts(**mock_args) + client.get_auto_migration_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceDrifts" + "%s/v1/{name=projects/*/locations/*/autoMigrationConfig}" % client.transport._host, args[1], ) -def test_list_resource_drifts_rest_flattened_error(transport: str = "rest"): +def test_get_auto_migration_config_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16899,76 +18031,13 @@ def test_list_resource_drifts_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_resource_drifts( - config.ListResourceDriftsRequest(), - parent="parent_value", - ) - - -def test_list_resource_drifts_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListResourceDriftsResponse( - resource_drifts=[ - config.ResourceDrift(), - config.ResourceDrift(), - config.ResourceDrift(), - ], - next_page_token="abc", - ), - config.ListResourceDriftsResponse( - resource_drifts=[], - next_page_token="def", - ), - config.ListResourceDriftsResponse( - resource_drifts=[ - config.ResourceDrift(), - ], - next_page_token="ghi", - ), - config.ListResourceDriftsResponse( - resource_drifts=[ - config.ResourceDrift(), - config.ResourceDrift(), - ], - ), + client.get_auto_migration_config( + config.GetAutoMigrationConfigRequest(), + name="name_value", ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListResourceDriftsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/previews/sample3" - } - - pager = client.list_resource_drifts(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.ResourceDrift) for i in results) - - pages = list(client.list_resource_drifts(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_get_resource_drift_rest_use_cached_wrapped_rpc(): +def test_update_auto_migration_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16983,7 +18052,8 @@ def test_get_resource_drift_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_resource_drift in client._transport._wrapped_methods + client._transport.update_auto_migration_config + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16992,29 +18062,32 @@ def 
test_get_resource_drift_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_resource_drift + client._transport.update_auto_migration_config ] = mock_rpc request = {} - client.get_resource_drift(request) + client.update_auto_migration_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource_drift(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_auto_migration_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_resource_drift_rest_required_fields( - request_type=config.GetResourceDriftRequest, +def test_update_auto_migration_config_rest_required_fields( + request_type=config.UpdateAutoMigrationConfigRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17025,21 +18098,19 @@ def test_get_resource_drift_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_drift._get_unset_required_fields(jsonified_request) + ).update_auto_migration_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_drift._get_unset_required_fields(jsonified_request) + ).update_auto_migration_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17048,7 +18119,7 @@ def test_get_resource_drift_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ResourceDrift() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17060,39 +18131,37 @@ def test_get_resource_drift_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_drift(request) + response = client.update_auto_migration_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_resource_drift_rest_unset_required_fields(): +def test_update_auto_migration_config_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_resource_drift._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_auto_migration_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("autoMigrationConfig",))) -def test_get_resource_drift_rest_flattened(): +def test_update_auto_migration_config_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17101,43 +18170,44 @@ def test_get_resource_drift_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ResourceDrift() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + "auto_migration_config": { + "name": "projects/sample1/locations/sample2/autoMigrationConfig" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + auto_migration_config=config.AutoMigrationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_drift(**mock_args) + client.update_auto_migration_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/previews/*/resourceDrifts/*}" + "%s/v1/{auto_migration_config.name=projects/*/locations/*/autoMigrationConfig}" % client.transport._host, args[1], ) -def test_get_resource_drift_rest_flattened_error(transport: str = "rest"): +def test_update_auto_migration_config_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17146,9 +18216,10 @@ def test_get_resource_drift_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_resource_drift( - config.GetResourceDriftRequest(), - name="name_value", + client.update_auto_migration_config( + config.UpdateAutoMigrationConfigRequest(), + auto_migration_config=config.AutoMigrationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -17851,6 +18922,52 @@ def test_get_resource_drift_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_auto_migration_config_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + call.return_value = config.AutoMigrationConfig() + client.get_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetAutoMigrationConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_auto_migration_config_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateAutoMigrationConfigRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = ConfigAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -18651,7 +19768,64 @@ async def test_get_resource_drift_empty_call_grpc_asyncio(): # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceDriftRequest() + request_msg = config.GetResourceDriftRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_auto_migration_config_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.AutoMigrationConfig( + name="name_value", + auto_migration_enabled=True, + ) + ) + await client.get_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetAutoMigrationConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_auto_migration_config_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateAutoMigrationConfigRequest() assert args[0] == request_msg @@ -22167,19 +23341,275 @@ def test_get_resource_change_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ResourceChange.to_json(config.ResourceChange()) + return_value = config.ResourceChange.to_json(config.ResourceChange()) + req.return_value.content = return_value + + request = config.GetResourceChangeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ResourceChange() + post_with_metadata.return_value = config.ResourceChange(), metadata + + client.get_resource_change( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_resource_drifts_rest_bad_request( + request_type=config.ListResourceDriftsRequest, +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_resource_drifts(request) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListResourceDriftsRequest, + dict, + ], +) +def test_list_resource_drifts_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListResourceDriftsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListResourceDriftsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_resource_drifts(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListResourceDriftsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_resource_drifts_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_resource_drifts" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_resource_drifts_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ConfigRestInterceptor, "pre_list_resource_drifts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.ListResourceDriftsRequest.pb( + config.ListResourceDriftsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = config.ListResourceDriftsResponse.to_json( + config.ListResourceDriftsResponse() + ) + req.return_value.content = return_value + + request = config.ListResourceDriftsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ListResourceDriftsResponse() + post_with_metadata.return_value = config.ListResourceDriftsResponse(), metadata + + client.list_resource_drifts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_resource_drift_rest_bad_request( + request_type=config.GetResourceDriftRequest, +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_resource_drift(request) + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetResourceDriftRequest, + dict, + ], +) +def test_get_resource_drift_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ResourceDrift( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ResourceDrift.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_resource_drift(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, config.ResourceDrift) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_resource_drift_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_resource_drift" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_resource_drift_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_resource_drift" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.GetResourceDriftRequest.pb(config.GetResourceDriftRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = config.ResourceDrift.to_json(config.ResourceDrift()) req.return_value.content = return_value - request = config.GetResourceChangeRequest() + request = config.GetResourceDriftRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ResourceChange() - post_with_metadata.return_value = config.ResourceChange(), metadata + 
post.return_value = config.ResourceDrift() + post_with_metadata.return_value = config.ResourceDrift(), metadata - client.get_resource_change( + client.get_resource_drift( request, metadata=[ ("key", "val"), @@ -22192,14 +23622,14 @@ def test_get_resource_change_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_resource_drifts_rest_bad_request( - request_type=config.ListResourceDriftsRequest, +def test_get_auto_migration_config_rest_bad_request( + request_type=config.GetAutoMigrationConfigRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/autoMigrationConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22214,31 +23644,31 @@ def test_list_resource_drifts_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resource_drifts(request) + client.get_auto_migration_config(request) @pytest.mark.parametrize( "request_type", [ - config.ListResourceDriftsRequest, + config.GetAutoMigrationConfigRequest, dict, ], ) -def test_list_resource_drifts_rest_call_success(request_type): +def test_get_auto_migration_config_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/autoMigrationConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourceDriftsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = config.AutoMigrationConfig( + name="name_value", + auto_migration_enabled=True, ) # Wrap the value into a proper Response obj @@ -22246,21 +23676,21 @@ def test_list_resource_drifts_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourceDriftsResponse.pb(return_value) + return_value = config.AutoMigrationConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resource_drifts(request) + response = client.get_auto_migration_config(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListResourceDriftsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, config.AutoMigrationConfig) + assert response.name == "name_value" + assert response.auto_migration_enabled is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_resource_drifts_rest_interceptors(null_interceptor): +def test_get_auto_migration_config_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22272,17 +23702,17 @@ def test_list_resource_drifts_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resource_drifts" + transports.ConfigRestInterceptor, "post_get_auto_migration_config" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resource_drifts_with_metadata" + transports.ConfigRestInterceptor, "post_get_auto_migration_config_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_resource_drifts" + transports.ConfigRestInterceptor, "pre_get_auto_migration_config" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListResourceDriftsRequest.pb( - config.ListResourceDriftsRequest() + pb_message = config.GetAutoMigrationConfigRequest.pb( + config.GetAutoMigrationConfigRequest() ) transcode.return_value = { "method": "post", @@ -22294,21 +23724,19 @@ def test_list_resource_drifts_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ListResourceDriftsResponse.to_json( - config.ListResourceDriftsResponse() - ) + return_value = config.AutoMigrationConfig.to_json(config.AutoMigrationConfig()) req.return_value.content = return_value - request = config.ListResourceDriftsRequest() + request = config.GetAutoMigrationConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListResourceDriftsResponse() - post_with_metadata.return_value = config.ListResourceDriftsResponse(), metadata + post.return_value = config.AutoMigrationConfig() + post_with_metadata.return_value = config.AutoMigrationConfig(), metadata - client.list_resource_drifts( + client.get_auto_migration_config( request, metadata=[ ("key", "val"), @@ -22321,15 +23749,17 @@ def test_list_resource_drifts_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_resource_drift_rest_bad_request( - request_type=config.GetResourceDriftRequest, +def test_update_auto_migration_config_rest_bad_request( + request_type=config.UpdateAutoMigrationConfigRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + "auto_migration_config": { + "name": "projects/sample1/locations/sample2/autoMigrationConfig" + } } request = request_type(**request_init) @@ -22345,53 +23775,125 @@ def test_get_resource_drift_rest_bad_request( response_value.request = 
mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_drift(request) + client.update_auto_migration_config(request) @pytest.mark.parametrize( "request_type", [ - config.GetResourceDriftRequest, + config.UpdateAutoMigrationConfigRequest, dict, ], ) -def test_get_resource_drift_rest_call_success(request_type): +def test_update_auto_migration_config_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + "auto_migration_config": { + "name": "projects/sample1/locations/sample2/autoMigrationConfig" + } } + request_init["auto_migration_config"] = { + "name": "projects/sample1/locations/sample2/autoMigrationConfig", + "update_time": {"seconds": 751, "nanos": 543}, + "auto_migration_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.UpdateAutoMigrationConfigRequest.meta.fields[ + "auto_migration_config" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "auto_migration_config" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["auto_migration_config"][field])): + del request_init["auto_migration_config"][field][i][subfield] + else: + del request_init["auto_migration_config"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ResourceDrift( - name="name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_drift(request) + response = client.update_auto_migration_config(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.ResourceDrift) - assert response.name == "name_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_resource_drift_rest_interceptors(null_interceptor): +def test_update_auto_migration_config_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22403,16 +23905,21 @@ def test_get_resource_drift_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource_drift" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_update_auto_migration_config" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource_drift_with_metadata" + transports.ConfigRestInterceptor, + "post_update_auto_migration_config_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_resource_drift" + transports.ConfigRestInterceptor, "pre_update_auto_migration_config" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetResourceDriftRequest.pb(config.GetResourceDriftRequest()) + pb_message = config.UpdateAutoMigrationConfigRequest.pb( + config.UpdateAutoMigrationConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22423,19 +23930,19 @@ def test_get_resource_drift_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ResourceDrift.to_json(config.ResourceDrift()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.GetResourceDriftRequest() + request = config.UpdateAutoMigrationConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ResourceDrift() - post_with_metadata.return_value = config.ResourceDrift(), metadata + post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_resource_drift( + client.update_auto_migration_config( request, metadata=[ ("key", "val"), @@ -23581,6 +25088,50 @@ def test_get_resource_drift_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_auto_migration_config_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + client.get_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetAutoMigrationConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_auto_migration_config_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + client.update_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateAutoMigrationConfigRequest() + + assert args[0] == request_msg + + def test_config_rest_lro_client(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23658,6 +25209,8 @@ def test_config_base_transport(): "get_resource_change", "list_resource_drifts", "get_resource_drift", + "get_auto_migration_config", + "update_auto_migration_config", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -24008,6 +25561,12 @@ def test_config_client_transport_session_collision(transport_name): session1 = client1.transport.get_resource_drift._session session2 = client2.transport.get_resource_drift._session assert session1 != session2 + session1 = client1.transport.get_auto_migration_config._session + session2 = client2.transport.get_auto_migration_config._session + assert session1 != session2 + session1 = client1.transport.update_auto_migration_config._session + session2 = client2.transport.update_auto_migration_config._session + assert session1 != session2 def test_config_grpc_transport_channel(): @@ -24163,10 +25722,33 @@ def test_config_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_deployment_path(): +def test_auto_migration_config_path(): project = "squid" location = "clam" - deployment = "whelk" + expected = "projects/{project}/locations/{location}/autoMigrationConfig".format( + project=project, + location=location, + ) + actual = ConfigClient.auto_migration_config_path(project, location) + assert expected == actual + + +def test_parse_auto_migration_config_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ConfigClient.auto_migration_config_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigClient.parse_auto_migration_config_path(path) + assert expected == actual + + +def test_deployment_path(): + project = "oyster" + location = "nudibranch" + deployment = "cuttlefish" expected = ( "projects/{project}/locations/{location}/deployments/{deployment}".format( project=project, @@ -24180,9 +25762,9 @@ def test_deployment_path(): def test_parse_deployment_path(): expected = { - "project": "octopus", - "location": "oyster", - "deployment": "nudibranch", + "project": "mussel", + "location": "winkle", + "deployment": "nautilus", } path = ConfigClient.deployment_path(**expected) @@ -24192,9 +25774,9 @@ def test_parse_deployment_path(): def test_preview_path(): - project = "cuttlefish" - location = "mussel" - preview = "winkle" + project = "scallop" + location = "abalone" + preview = "squid" expected = "projects/{project}/locations/{location}/previews/{preview}".format( project=project, location=location, @@ -24206,9 +25788,9 @@ def test_preview_path(): def test_parse_preview_path(): expected = { - "project": "nautilus", - "location": "scallop", - "preview": "abalone", + "project": "clam", + "location": "whelk", + "preview": "octopus", } path = ConfigClient.preview_path(**expected) @@ -24218,11 +25800,11 @@ def test_parse_preview_path(): def test_resource_path(): - project = "squid" - location = "clam" - deployment = "whelk" - revision = "octopus" - resource = "oyster" + project = "oyster" + location = "nudibranch" + deployment = "cuttlefish" + revision = "mussel" + resource = "winkle" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}/resources/{resource}".format( project=project, location=location, @@ -24238,11 +25820,11 @@ def test_resource_path(): def test_parse_resource_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "deployment": "mussel", - "revision": "winkle", - "resource": "nautilus", + "project": "nautilus", + "location": "scallop", + "deployment": "abalone", + "revision": "squid", + "resource": "clam", } path = ConfigClient.resource_path(**expected) @@ -24252,10 +25834,10 @@ def test_parse_resource_path(): def test_resource_change_path(): - project = "scallop" - location = "abalone" - preview = "squid" - resource_change = "clam" + project = "whelk" + location = "octopus" + preview = "oyster" + resource_change = "nudibranch" expected = "projects/{project}/locations/{location}/previews/{preview}/resourceChanges/{resource_change}".format( project=project, location=location, @@ -24270,10 +25852,10 @@ def test_resource_change_path(): def test_parse_resource_change_path(): expected = { - "project": "whelk", - "location": "octopus", - "preview": "oyster", - "resource_change": "nudibranch", + "project": "cuttlefish", + "location": "mussel", + "preview": "winkle", + "resource_change": "nautilus", } path = ConfigClient.resource_change_path(**expected) @@ -24283,10 +25865,10 @@ def test_parse_resource_change_path(): def test_resource_drift_path(): - project = "cuttlefish" - location = "mussel" - preview = "winkle" - resource_drift = "nautilus" + project = "scallop" + location = "abalone" + preview = "squid" + resource_drift = "clam" expected = "projects/{project}/locations/{location}/previews/{preview}/resourceDrifts/{resource_drift}".format( project=project, location=location, @@ -24301,10 +25883,10 @@ def test_resource_drift_path(): def test_parse_resource_drift_path(): expected = { - "project": "scallop", - "location": "abalone", - "preview": "squid", - "resource_drift": "clam", + "project": 
"whelk", + "location": "octopus", + "preview": "oyster", + "resource_drift": "nudibranch", } path = ConfigClient.resource_drift_path(**expected) @@ -24314,10 +25896,10 @@ def test_parse_resource_drift_path(): def test_revision_path(): - project = "whelk" - location = "octopus" - deployment = "oyster" - revision = "nudibranch" + project = "cuttlefish" + location = "mussel" + deployment = "winkle" + revision = "nautilus" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}".format( project=project, location=location, @@ -24330,10 +25912,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "deployment": "winkle", - "revision": "nautilus", + "project": "scallop", + "location": "abalone", + "deployment": "squid", + "revision": "clam", } path = ConfigClient.revision_path(**expected) @@ -24343,8 +25925,8 @@ def test_parse_revision_path(): def test_service_account_path(): - project = "scallop" - service_account = "abalone" + project = "whelk" + service_account = "octopus" expected = "projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -24355,8 +25937,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "squid", - "service_account": "clam", + "project": "oyster", + "service_account": "nudibranch", } path = ConfigClient.service_account_path(**expected) @@ -24366,9 +25948,9 @@ def test_parse_service_account_path(): def test_terraform_version_path(): - project = "whelk" - location = "octopus" - terraform_version = "oyster" + project = "cuttlefish" + location = "mussel" + terraform_version = "winkle" expected = "projects/{project}/locations/{location}/terraformVersions/{terraform_version}".format( project=project, location=location, @@ -24380,9 +25962,9 @@ def test_terraform_version_path(): def test_parse_terraform_version_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "terraform_version": "mussel", + "project": "nautilus", + "location": "scallop", + "terraform_version": "abalone", } path = ConfigClient.terraform_version_path(**expected) @@ -24392,9 +25974,9 @@ def test_parse_terraform_version_path(): def test_worker_pool_path(): - project = "winkle" - location = "nautilus" - worker_pool = "scallop" + project = "squid" + location = "clam" + worker_pool = "whelk" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -24408,9 +25990,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "abalone", - "location": "squid", - "worker_pool": "clam", + "project": "octopus", + "location": "oyster", + "worker_pool": "nudibranch", } path = ConfigClient.worker_pool_path(**expected) @@ -24420,7 +26002,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -24430,7 +26012,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "mussel", } path = ConfigClient.common_billing_account_path(**expected) @@ -24440,7 +26022,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, 
) @@ -24450,7 +26032,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "nautilus", } path = ConfigClient.common_folder_path(**expected) @@ -24460,7 +26042,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -24470,7 +26052,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "abalone", } path = ConfigClient.common_organization_path(**expected) @@ -24480,7 +26062,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -24490,7 +26072,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "clam", } path = ConfigClient.common_project_path(**expected) @@ -24500,8 +26082,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -24512,8 +26094,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "oyster", + "location": "nudibranch", } path = ConfigClient.common_location_path(**expected) diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py index cdd5b6cbc8e9..99765a516a64 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py @@ -45,16 +45,23 @@ ProductType, ) from google.cloud.databasecenter_v1beta.types.service import ( + AggregateFleetRequest, + AggregateFleetResponse, + AggregateFleetRow, BackupDRConfig, DatabaseResource, DatabaseResourceGroup, + DeltaDetails, + Dimension, Edition, Label, + ManagementType, QueryDatabaseResourceGroupsRequest, QueryDatabaseResourceGroupsResponse, QueryProductsRequest, QueryProductsResponse, ResourceCategory, + ResourceDetails, SubResourceType, Tag, ) @@ -99,16 +106,23 @@ "Product", "Engine", "ProductType", + "AggregateFleetRequest", + "AggregateFleetResponse", + "AggregateFleetRow", "BackupDRConfig", "DatabaseResource", "DatabaseResourceGroup", + "DeltaDetails", + "Dimension", "Label", "QueryDatabaseResourceGroupsRequest", "QueryDatabaseResourceGroupsResponse", "QueryProductsRequest", "QueryProductsResponse", + "ResourceDetails", "Tag", "Edition", + "ManagementType", "ResourceCategory", "SubResourceType", "AdditionalDetail", diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py index 5d31f9d262be..4b97d7f5e756 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py @@ -40,16 +40,23 @@ from .types.operation_error_type import OperationErrorType from .types.product import Engine, Product, ProductType from .types.service import ( + AggregateFleetRequest, + 
AggregateFleetResponse, + AggregateFleetRow, BackupDRConfig, DatabaseResource, DatabaseResourceGroup, + DeltaDetails, + Dimension, Edition, Label, + ManagementType, QueryDatabaseResourceGroupsRequest, QueryDatabaseResourceGroupsResponse, QueryProductsRequest, QueryProductsResponse, ResourceCategory, + ResourceDetails, SubResourceType, Tag, ) @@ -176,6 +183,9 @@ def _get_version(dependency_name): __all__ = ( "DatabaseCenterAsyncClient", "AdditionalDetail", + "AggregateFleetRequest", + "AggregateFleetResponse", + "AggregateFleetRow", "AutomatedBackupPolicyInfo", "BackupDRConfig", "BackupRunInfo", @@ -183,6 +193,8 @@ def _get_version(dependency_name): "DatabaseResource", "DatabaseResourceGroup", "DeletionProtectionInfo", + "DeltaDetails", + "Dimension", "Edition", "Engine", "InefficientQueryInfo", @@ -192,6 +204,7 @@ def _get_version(dependency_name): "MachineConfig", "MaintenanceInfo", "MaintenanceRecommendationInfo", + "ManagementType", "MetricData", "Metrics", "OperationErrorType", @@ -206,6 +219,7 @@ def _get_version(dependency_name): "RecommendationInfo", "RegulatoryStandard", "ResourceCategory", + "ResourceDetails", "ResourceMaintenanceDenySchedule", "ResourceMaintenanceSchedule", "ResourceSuspensionInfo", diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/gapic_metadata.json b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/gapic_metadata.json index e9717f5f2070..06e0d17036fb 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/gapic_metadata.json +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "DatabaseCenterClient", "rpcs": { + "AggregateFleet": { + "methods": [ + "aggregate_fleet" + ] + }, "QueryDatabaseResourceGroups": { "methods": [ "query_database_resource_groups" @@ -25,6 +30,11 @@ "grpc-async": { "libraryClient": "DatabaseCenterAsyncClient", "rpcs": { + "AggregateFleet": { + "methods": [ + "aggregate_fleet" + ] + }, "QueryDatabaseResourceGroups": { "methods": [ "query_database_resource_groups" @@ -40,6 +50,11 @@ "rest": { "libraryClient": "DatabaseCenterClient", "rpcs": { + "AggregateFleet": { + "methods": [ + "aggregate_fleet" + ] + }, "QueryDatabaseResourceGroups": { "methods": [ "query_database_resource_groups" diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/async_client.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/async_client.py index be731f42c498..590e9d24d035 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/async_client.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/async_client.py @@ -383,6 +383,102 @@ async def sample_query_products(): # Done; return the response. return response + async def aggregate_fleet( + self, + request: Optional[Union[service.AggregateFleetRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AggregateFleetAsyncPager: + r"""AggregateFleet provides statistics about the fleet + grouped by various fields. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import databasecenter_v1beta + + async def sample_aggregate_fleet(): + # Create a client + client = databasecenter_v1beta.DatabaseCenterAsyncClient() + + # Initialize request argument(s) + request = databasecenter_v1beta.AggregateFleetRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.aggregate_fleet(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.databasecenter_v1beta.types.AggregateFleetRequest, dict]]): + The request object. The request message to aggregate + fleet which are grouped by a field. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.databasecenter_v1beta.services.database_center.pagers.AggregateFleetAsyncPager: + The response message to aggregate a + fleet by some group by fields. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.AggregateFleetRequest): + request = service.AggregateFleetRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.aggregate_fleet + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.AggregateFleetAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def query_database_resource_groups( self, request: Optional[ diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/client.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/client.py index df45047c1aa8..dc1195a9a103 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/client.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/client.py @@ -802,6 +802,100 @@ def sample_query_products(): # Done; return the response. 
return response + def aggregate_fleet( + self, + request: Optional[Union[service.AggregateFleetRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AggregateFleetPager: + r"""AggregateFleet provides statistics about the fleet + grouped by various fields. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import databasecenter_v1beta + + def sample_aggregate_fleet(): + # Create a client + client = databasecenter_v1beta.DatabaseCenterClient() + + # Initialize request argument(s) + request = databasecenter_v1beta.AggregateFleetRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.aggregate_fleet(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.databasecenter_v1beta.types.AggregateFleetRequest, dict]): + The request object. The request message to aggregate + fleet which are grouped by a field. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.databasecenter_v1beta.services.database_center.pagers.AggregateFleetPager: + The response message to aggregate a + fleet by some group by fields. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.AggregateFleetRequest): + request = service.AggregateFleetRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregate_fleet] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregateFleetPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def query_database_resource_groups( self, request: Optional[ diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/pagers.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/pagers.py index 3203fe48af87..cbf5725278cc 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/pagers.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/pagers.py @@ -197,6 +197,162 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class AggregateFleetPager: + """A pager for iterating through ``aggregate_fleet`` requests. + + This class thinly wraps an initial + :class:`google.cloud.databasecenter_v1beta.types.AggregateFleetResponse` object, and + provides an ``__iter__`` method to iterate through its + ``rows`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregateFleet`` requests and continue to iterate + through the ``rows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.databasecenter_v1beta.types.AggregateFleetResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.AggregateFleetResponse], + request: service.AggregateFleetRequest, + response: service.AggregateFleetResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.databasecenter_v1beta.types.AggregateFleetRequest): + The initial request object. + response (google.cloud.databasecenter_v1beta.types.AggregateFleetResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.AggregateFleetRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.AggregateFleetResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[service.AggregateFleetRow]: + for page in self.pages: + yield from page.rows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class AggregateFleetAsyncPager: + """A pager for iterating through ``aggregate_fleet`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.databasecenter_v1beta.types.AggregateFleetResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``rows`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``AggregateFleet`` requests and continue to iterate + through the ``rows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.databasecenter_v1beta.types.AggregateFleetResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.AggregateFleetResponse]], + request: service.AggregateFleetRequest, + response: service.AggregateFleetResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.databasecenter_v1beta.types.AggregateFleetRequest): + The initial request object. + response (google.cloud.databasecenter_v1beta.types.AggregateFleetResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.AggregateFleetRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.AggregateFleetResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[service.AggregateFleetRow]: + async def async_generator(): + async for page in self.pages: + for response in page.rows: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class QueryDatabaseResourceGroupsPager: """A pager for iterating through ``query_database_resource_groups`` requests. 
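For context, a minimal consumption sketch for the AggregateFleet pager surface added above. It mirrors the generated samples in this diff; "parent_value" is the same placeholder those samples use, and the sketch's function name is invented for illustration.

# A usage sketch, assuming the exports added by this change are installed.
from google.cloud import databasecenter_v1beta


def sketch_aggregate_fleet_rows():
    client = databasecenter_v1beta.DatabaseCenterClient()
    request = databasecenter_v1beta.AggregateFleetRequest(parent="parent_value")

    # The pager's __iter__ yields AggregateFleetRow items from the `rows`
    # field, issuing follow-up AggregateFleet requests whenever the current
    # response carries a next_page_token.
    for row in client.aggregate_fleet(request=request):
        print(row)

    # Page-at-a-time iteration is also available via the `pages` property.
    for page in client.aggregate_fleet(request=request).pages:
        print(len(page.rows))

The async pager behaves the same way, except iteration uses `async for` on the AggregateFleetAsyncPager returned by DatabaseCenterAsyncClient.aggregate_fleet.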
diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/base.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/base.py index e0dbddabb305..07a22b06b820 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/base.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/base.py @@ -138,6 +138,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.aggregate_fleet: gapic_v1.method.wrap_method( + self.aggregate_fleet, + default_timeout=None, + client_info=client_info, + ), self.query_database_resource_groups: gapic_v1.method.wrap_method( self.query_database_resource_groups, default_timeout=None, @@ -163,6 +168,17 @@ def query_products( ]: raise NotImplementedError() + @property + def aggregate_fleet( + self, + ) -> Callable[ + [service.AggregateFleetRequest], + Union[ + service.AggregateFleetResponse, Awaitable[service.AggregateFleetResponse] + ], + ]: + raise NotImplementedError() + @property def query_database_resource_groups( self, diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc.py index b4c32b4bfd02..dac449683c5a 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc.py @@ -349,6 +349,33 @@ def query_products( ) return self._stubs["query_products"] + @property + def aggregate_fleet( + self, + ) -> Callable[[service.AggregateFleetRequest], service.AggregateFleetResponse]: + r"""Return a callable for the aggregate fleet method over gRPC. + + AggregateFleet provides statistics about the fleet + grouped by various fields. + + Returns: + Callable[[~.AggregateFleetRequest], + ~.AggregateFleetResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "aggregate_fleet" not in self._stubs: + self._stubs["aggregate_fleet"] = self._logged_channel.unary_unary( + "/google.cloud.databasecenter.v1beta.DatabaseCenter/AggregateFleet", + request_serializer=service.AggregateFleetRequest.serialize, + response_deserializer=service.AggregateFleetResponse.deserialize, + ) + return self._stubs["aggregate_fleet"] + @property def query_database_resource_groups( self, diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc_asyncio.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc_asyncio.py index 113bed754938..6003afa9b3e3 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc_asyncio.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/grpc_asyncio.py @@ -359,6 +359,35 @@ def query_products( ) return self._stubs["query_products"] + @property + def aggregate_fleet( + self, + ) -> Callable[ + [service.AggregateFleetRequest], Awaitable[service.AggregateFleetResponse] + ]: + r"""Return a callable for the aggregate fleet method over gRPC. + + AggregateFleet provides statistics about the fleet + grouped by various fields. + + Returns: + Callable[[~.AggregateFleetRequest], + Awaitable[~.AggregateFleetResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "aggregate_fleet" not in self._stubs: + self._stubs["aggregate_fleet"] = self._logged_channel.unary_unary( + "/google.cloud.databasecenter.v1beta.DatabaseCenter/AggregateFleet", + request_serializer=service.AggregateFleetRequest.serialize, + response_deserializer=service.AggregateFleetResponse.deserialize, + ) + return self._stubs["aggregate_fleet"] + @property def query_database_resource_groups( self, @@ -399,6 +428,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.aggregate_fleet: self._wrap_method( + self.aggregate_fleet, + default_timeout=None, + client_info=client_info, + ), self.query_database_resource_groups: self._wrap_method( self.query_database_resource_groups, default_timeout=None, diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest.py index b0bc011db31f..e84fca4f1901 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest.py @@ -72,6 +72,14 @@ class DatabaseCenterRestInterceptor: .. 
code-block:: python class MyCustomDatabaseCenterInterceptor(DatabaseCenterRestInterceptor): + def pre_aggregate_fleet(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregate_fleet(self, response): + logging.log(f"Received response: {response}") + return response + def pre_query_database_resource_groups(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -94,6 +102,52 @@ def post_query_products(self, response): """ + def pre_aggregate_fleet( + self, + request: service.AggregateFleetRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.AggregateFleetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for aggregate_fleet + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseCenter server. + """ + return request, metadata + + def post_aggregate_fleet( + self, response: service.AggregateFleetResponse + ) -> service.AggregateFleetResponse: + """Post-rpc interceptor for aggregate_fleet + + DEPRECATED. Please use the `post_aggregate_fleet_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseCenter server but before + it is returned to user code. This `post_aggregate_fleet` interceptor runs + before the `post_aggregate_fleet_with_metadata` interceptor. + """ + return response + + def post_aggregate_fleet_with_metadata( + self, + response: service.AggregateFleetResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.AggregateFleetResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregate_fleet + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseCenter server but before it is returned to user code. + + We recommend only using this `post_aggregate_fleet_with_metadata` + interceptor in new development instead of the `post_aggregate_fleet` interceptor. + When both interceptors are used, this `post_aggregate_fleet_with_metadata` interceptor runs after the + `post_aggregate_fleet` interceptor. The (possibly modified) response returned by + `post_aggregate_fleet` will be passed to + `post_aggregate_fleet_with_metadata`. 
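+
+        For example, a subclass might append a marker entry to the returned
+        metadata (a minimal sketch; the header name is purely illustrative):
+
+        .. code-block:: python
+
+            def post_aggregate_fleet_with_metadata(self, response, metadata):
+                metadata = list(metadata) + [("x-seen-by-interceptor", "1")]
+                return response, metadata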
+ """ + return response, metadata + def pre_query_database_resource_groups( self, request: service.QueryDatabaseResourceGroupsRequest, @@ -281,6 +335,153 @@ def __init__( self._interceptor = interceptor or DatabaseCenterRestInterceptor() self._prep_wrapped_messages(client_info) + class _AggregateFleet( + _BaseDatabaseCenterRestTransport._BaseAggregateFleet, DatabaseCenterRestStub + ): + def __hash__(self): + return hash("DatabaseCenterRestTransport.AggregateFleet") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: service.AggregateFleetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.AggregateFleetResponse: + r"""Call the aggregate fleet method over HTTP. + + Args: + request (~.service.AggregateFleetRequest): + The request object. The request message to aggregate + fleet which are grouped by a field. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.AggregateFleetResponse: + The response message to aggregate a + fleet by some group by fields. + + """ + + http_options = ( + _BaseDatabaseCenterRestTransport._BaseAggregateFleet._get_http_options() + ) + + request, metadata = self._interceptor.pre_aggregate_fleet(request, metadata) + transcoded_request = _BaseDatabaseCenterRestTransport._BaseAggregateFleet._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDatabaseCenterRestTransport._BaseAggregateFleet._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.databasecenter_v1beta.DatabaseCenterClient.AggregateFleet", + extra={ + "serviceName": "google.cloud.databasecenter.v1beta.DatabaseCenter", + "rpcName": "AggregateFleet", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseCenterRestTransport._AggregateFleet._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = service.AggregateFleetResponse()
+            pb_resp = service.AggregateFleetResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_aggregate_fleet(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_aggregate_fleet_with_metadata(
+                resp, response_metadata
+            )
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                try:
+                    # Serialize the parsed proto (resp), not the raw HTTP
+                    # response object, for the debug-log payload.
+                    response_payload = service.AggregateFleetResponse.to_json(resp)
+                except Exception:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.databasecenter_v1beta.DatabaseCenterClient.aggregate_fleet",
+                    extra={
+                        "serviceName": "google.cloud.databasecenter.v1beta.DatabaseCenter",
+                        "rpcName": "AggregateFleet",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
     class _QueryDatabaseResourceGroups(
         _BaseDatabaseCenterRestTransport._BaseQueryDatabaseResourceGroups,
         DatabaseCenterRestStub,
@@ -594,6 +795,14 @@ def __call__(
         )
         return resp

+    @property
+    def aggregate_fleet(
+        self,
+    ) -> Callable[[service.AggregateFleetRequest], service.AggregateFleetResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._AggregateFleet(self._session, self._host, self._interceptor)  # type: ignore
+
     @property
     def query_database_resource_groups(
         self,
diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest_base.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest_base.py
index b45f63ab042b..866692542068 100644
--- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest_base.py
+++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/services/database_center/transports/rest_base.py
@@ -87,6 +87,55 @@ def __init__(
             api_audience=api_audience,
         )

+    class _BaseAggregateFleet:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "parent": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1beta:aggregateFleet",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = service.AggregateFleetRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(
+                _BaseDatabaseCenterRestTransport._BaseAggregateFleet._get_unset_required_fields(
+                    query_params
+                )
+            )
+
+            # "json;enum-encoding=int" asks the endpoint to encode enum values
+            # as integers, matching the protobuf numbering.
+            query_params["$alt"] = "json;enum-encoding=int"
+
return query_params + class _BaseQueryDatabaseResourceGroups: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py index ef3f0dc9a2e9..6e85318d18da 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py @@ -24,16 +24,23 @@ from .operation_error_type import OperationErrorType from .product import Engine, Product, ProductType from .service import ( + AggregateFleetRequest, + AggregateFleetResponse, + AggregateFleetRow, BackupDRConfig, DatabaseResource, DatabaseResourceGroup, + DeltaDetails, + Dimension, Edition, Label, + ManagementType, QueryDatabaseResourceGroupsRequest, QueryDatabaseResourceGroupsResponse, QueryProductsRequest, QueryProductsResponse, ResourceCategory, + ResourceDetails, SubResourceType, Tag, ) @@ -76,16 +83,23 @@ "Product", "Engine", "ProductType", + "AggregateFleetRequest", + "AggregateFleetResponse", + "AggregateFleetRow", "BackupDRConfig", "DatabaseResource", "DatabaseResourceGroup", + "DeltaDetails", + "Dimension", "Label", "QueryDatabaseResourceGroupsRequest", "QueryDatabaseResourceGroupsResponse", "QueryProductsRequest", "QueryProductsResponse", + "ResourceDetails", "Tag", "Edition", + "ManagementType", "ResourceCategory", "SubResourceType", "AdditionalDetail", diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py index ed3ca6627f1b..62cb926d1718 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.type import date_pb2 # type: ignore import proto # type: ignore from google.cloud.databasecenter_v1beta.types import ( @@ -32,6 +33,7 @@ "ResourceCategory", "Edition", "SubResourceType", + "ManagementType", "QueryProductsRequest", "QueryProductsResponse", "QueryDatabaseResourceGroupsRequest", @@ -39,8 +41,14 @@ "DatabaseResourceGroup", "DatabaseResource", "Label", + "AggregateFleetRequest", + "AggregateFleetResponse", + "AggregateFleetRow", + "Dimension", "BackupDRConfig", "Tag", + "ResourceDetails", + "DeltaDetails", }, ) @@ -111,6 +119,22 @@ class SubResourceType(proto.Enum): SUB_RESOURCE_TYPE_OTHER = 4 +class ManagementType(proto.Enum): + r"""The management type of the resource. + + Values: + MANAGEMENT_TYPE_UNSPECIFIED (0): + Unspecified. + MANAGEMENT_TYPE_GCP_MANAGED (1): + Google-managed resource. + MANAGEMENT_TYPE_SELF_MANAGED (2): + Self-managed resource. + """ + MANAGEMENT_TYPE_UNSPECIFIED = 0 + MANAGEMENT_TYPE_GCP_MANAGED = 1 + MANAGEMENT_TYPE_SELF_MANAGED = 2 + + class QueryProductsRequest(proto.Message): r"""QueryProductsRequest is the request to get a list of products. @@ -599,6 +623,429 @@ class Label(proto.Message): ) +class AggregateFleetRequest(proto.Message): + r"""The request message to aggregate fleet which are grouped by a + field. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. 
+            Parent can be a project, a folder, or an
+            organization. The search is limited to the resources within
+            the ``scope``.
+
+            The allowed values are:
+
+            -  projects/{PROJECT_ID} (e.g., "projects/foo-bar")
+            -  projects/{PROJECT_NUMBER} (e.g., "projects/12345678")
+            -  folders/{FOLDER_NUMBER} (e.g., "folders/1234567")
+            -  organizations/{ORGANIZATION_NUMBER} (e.g.,
+               "organizations/123456")
+        filter (str):
+            Optional. The expression to filter resources.
+
+            Supported fields are: ``full_resource_name``,
+            ``resource_type``, ``container``, ``product.type``,
+            ``product.engine``, ``product.version``, ``location``,
+            ``labels``, ``issues``, fields of availability_info and
+            data_protection_info, ``resource_name``, etc.
+
+            The expression is a list of zero or more restrictions
+            combined via the logical operators ``AND`` and ``OR``. When
+            ``AND`` and ``OR`` are both used in the expression,
+            parentheses must be used to group the combinations.
+
+            Examples:
+
+            -  location="us-east1"
+            -  container="projects/123" OR container="projects/456"
+            -  (container="projects/123" OR container="projects/456")
+               AND location="us-east1"
+        group_by (str):
+            Optional. The fields that statistics are grouped by,
+            provided as a comma-separated list. Valid values are any
+            combination of the following:
+
+            -  container
+            -  product.type
+            -  product.engine
+            -  product.version
+            -  location
+            -  sub_resource_type
+            -  management_type
+            -  tag.key
+            -  tag.value
+            -  tag.source
+            -  tag.inherited
+            -  label.key
+            -  label.value
+            -  label.source
+            -  has_maintenance_schedule
+            -  has_deny_maintenance_schedules
+        order_by (str):
+            Optional. Valid values to order by are:
+
+            -  resource_groups_count
+            -  resources_count
+            -  all fields supported by ``group_by``
+
+            The default order is ascending; add "DESC" after a field
+            name to indicate descending order, or "ASC" to make
+            ascending order explicit. Ordering by multiple fields is
+            supported. For example, order_by = "resource_groups_count"
+            sorts the response in ascending order; order_by =
+            "resource_groups_count DESC" sorts it in descending order;
+            order_by = "product.type, product.version DESC, location"
+            orders by type ascending, then version descending, then
+            location ascending.
+        page_size (int):
+            Optional. If unspecified, at most 50 items
+            will be returned. The maximum value is 1000;
+            values above 1000 will be coerced to 1000.
+        page_token (str):
+            Optional. A page token, received from a previous
+            ``AggregateFleet`` call. Provide this to retrieve the
+            subsequent page. All other parameters should match the
+            parameters in the call that provided the page token, except
+            for page_size, which may differ.
+        baseline_date (google.type.date_pb2.Date):
+            Optional. The baseline date with respect to
+            which the delta counts are calculated. If not
+            set, delta counts are not included in the
+            response, and the response reflects the current
+            state of the fleet.
+
+            This field is a member of `oneof`_ ``_baseline_date``.
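+
+    A populated request might look like this (the values are illustrative
+    placeholders only):
+
+    .. code-block:: python
+
+        request = AggregateFleetRequest(
+            parent="projects/my-project",
+            group_by="product.type, location",
+            order_by="resource_groups_count DESC",
+            page_size=100,
+        )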
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + group_by: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + page_size: int = proto.Field( + proto.INT32, + number=5, + ) + page_token: str = proto.Field( + proto.STRING, + number=6, + ) + baseline_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=date_pb2.Date, + ) + + +class AggregateFleetResponse(proto.Message): + r"""The response message to aggregate a fleet by some group by + fields. + + Attributes: + rows (MutableSequence[google.cloud.databasecenter_v1beta.types.AggregateFleetRow]): + Represents a row grouped by the fields in the + input. + resource_groups_total_count (int): + Count of all resource groups in the fleet. + This includes counts from all pages. + resource_total_count (int): + Count of all resources in the fleet. This + includes counts from all pages. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Unordered list. List of unreachable regions + from where data could not be retrieved. + """ + + @property + def raw_page(self): + return self + + rows: MutableSequence["AggregateFleetRow"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AggregateFleetRow", + ) + resource_groups_total_count: int = proto.Field( + proto.INT32, + number=2, + ) + resource_total_count: int = proto.Field( + proto.INT32, + number=3, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=4, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class AggregateFleetRow(proto.Message): + r"""Individual row grouped by a particular dimension. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension (MutableSequence[google.cloud.databasecenter_v1beta.types.Dimension]): + Group by dimension. + resource_groups_count (int): + Number of resource groups that have a + particular dimension. + resources_count (int): + Number of resources that have a particular + dimension. + delta_details (google.cloud.databasecenter_v1beta.types.DeltaDetails): + Optional. Delta counts and details of + resources which were added to/deleted from + fleet. + + This field is a member of `oneof`_ ``_delta_details``. + """ + + dimension: MutableSequence["Dimension"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Dimension", + ) + resource_groups_count: int = proto.Field( + proto.INT32, + number=2, + ) + resources_count: int = proto.Field( + proto.INT32, + number=3, + ) + delta_details: "DeltaDetails" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="DeltaDetails", + ) + + +class Dimension(proto.Message): + r"""Dimension used to aggregate the fleet. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container (str): + Specifies where the resource is created. For + GCP, it is the full name of the project. + + This field is a member of `oneof`_ ``dimension``. 
+ product_type (google.cloud.databasecenter_v1beta.types.ProductType): + Type to identify a product + + This field is a member of `oneof`_ ``dimension``. + product_engine (google.cloud.databasecenter_v1beta.types.Engine): + Engine refers to underlying database binary + running in an instance. + + This field is a member of `oneof`_ ``dimension``. + product_version (str): + Version of the underlying database engine + + This field is a member of `oneof`_ ``dimension``. + location (str): + The location of the resources. It supports + returning only regional locations in GCP. + + This field is a member of `oneof`_ ``dimension``. + resource_type (str): + The type of resource defined according to the + pattern: {Service Name}/{Type}. Ex: + + sqladmin.googleapis.com/Instance + alloydb.googleapis.com/Cluster + alloydb.googleapis.com/Instance + spanner.googleapis.com/Instance + + This field is a member of `oneof`_ ``dimension``. + sub_resource_type (google.cloud.databasecenter_v1beta.types.SubResourceType): + Subtype of the resource specified at creation + time. + + This field is a member of `oneof`_ ``dimension``. + resource_category (google.cloud.databasecenter_v1beta.types.ResourceCategory): + The category of the resource. + + This field is a member of `oneof`_ ``dimension``. + management_type (google.cloud.databasecenter_v1beta.types.ManagementType): + The management type of the resource. + + This field is a member of `oneof`_ ``dimension``. + edition (google.cloud.databasecenter_v1beta.types.Edition): + The edition of the resource. + + This field is a member of `oneof`_ ``dimension``. + tag_key (str): + Tag key of the resource. + + This field is a member of `oneof`_ ``dimension``. + tag_value (str): + Tag value of the resource. + + This field is a member of `oneof`_ ``dimension``. + tag_source (str): + Tag source of the resource. + + This field is a member of `oneof`_ ``dimension``. + tag_inherited (bool): + Tag inheritance value of the resource. + + This field is a member of `oneof`_ ``dimension``. + label_key (str): + Label key of the resource. + + This field is a member of `oneof`_ ``dimension``. + label_value (str): + Label value of the resource. + + This field is a member of `oneof`_ ``dimension``. + label_source (str): + Label source of the resource. + + This field is a member of `oneof`_ ``dimension``. + has_maintenance_schedule (bool): + Whether the resource has a maintenance + schedule. + + This field is a member of `oneof`_ ``dimension``. + has_deny_maintenance_schedules (bool): + Whether the resource has deny maintenance + schedules. + + This field is a member of `oneof`_ ``dimension``. 
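+
+    Because ``dimension`` is a oneof, exactly one member is set per row. A
+    reader can check which member that is via the underlying protobuf
+    message (a sketch; ``row_dimension`` is an illustrative variable):
+
+    .. code-block:: python
+
+        which_field = Dimension.pb(row_dimension).WhichOneof("dimension")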
+ """ + + container: str = proto.Field( + proto.STRING, + number=2, + oneof="dimension", + ) + product_type: gcd_product.ProductType = proto.Field( + proto.ENUM, + number=3, + oneof="dimension", + enum=gcd_product.ProductType, + ) + product_engine: gcd_product.Engine = proto.Field( + proto.ENUM, + number=4, + oneof="dimension", + enum=gcd_product.Engine, + ) + product_version: str = proto.Field( + proto.STRING, + number=5, + oneof="dimension", + ) + location: str = proto.Field( + proto.STRING, + number=6, + oneof="dimension", + ) + resource_type: str = proto.Field( + proto.STRING, + number=7, + oneof="dimension", + ) + sub_resource_type: "SubResourceType" = proto.Field( + proto.ENUM, + number=8, + oneof="dimension", + enum="SubResourceType", + ) + resource_category: "ResourceCategory" = proto.Field( + proto.ENUM, + number=9, + oneof="dimension", + enum="ResourceCategory", + ) + management_type: "ManagementType" = proto.Field( + proto.ENUM, + number=10, + oneof="dimension", + enum="ManagementType", + ) + edition: "Edition" = proto.Field( + proto.ENUM, + number=11, + oneof="dimension", + enum="Edition", + ) + tag_key: str = proto.Field( + proto.STRING, + number=12, + oneof="dimension", + ) + tag_value: str = proto.Field( + proto.STRING, + number=13, + oneof="dimension", + ) + tag_source: str = proto.Field( + proto.STRING, + number=14, + oneof="dimension", + ) + tag_inherited: bool = proto.Field( + proto.BOOL, + number=15, + oneof="dimension", + ) + label_key: str = proto.Field( + proto.STRING, + number=16, + oneof="dimension", + ) + label_value: str = proto.Field( + proto.STRING, + number=17, + oneof="dimension", + ) + label_source: str = proto.Field( + proto.STRING, + number=18, + oneof="dimension", + ) + has_maintenance_schedule: bool = proto.Field( + proto.BOOL, + number=19, + oneof="dimension", + ) + has_deny_maintenance_schedules: bool = proto.Field( + proto.BOOL, + number=20, + oneof="dimension", + ) + + class BackupDRConfig(proto.Message): r"""BackupDRConfig to capture the backup and disaster recovery details of database resource. @@ -665,4 +1112,65 @@ class Tag(proto.Message): ) +class ResourceDetails(proto.Message): + r"""Capture the resource details for resources that are included + in the delta counts. + + Attributes: + full_resource_name (str): + Full resource name of the resource. + container (str): + Specifies where the resource is created. For + GCP, it is the full name of the project. + product (google.cloud.databasecenter_v1beta.types.Product): + Product type of the resource. + location (str): + Location of the resource. + """ + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + container: str = proto.Field( + proto.STRING, + number=2, + ) + product: gcd_product.Product = proto.Field( + proto.MESSAGE, + number=3, + message=gcd_product.Product, + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeltaDetails(proto.Message): + r"""Captures the details of items that have increased or + decreased in some bucket when compared to some point in history. + It is currently used to capture the delta of resources that have + been added or removed in the fleet as well as to capture the + resources that have a change in Issue/Signal status. + + Attributes: + increased_resources (MutableSequence[google.cloud.databasecenter_v1beta.types.ResourceDetails]): + Details of resources that have increased. + decreased_resources (MutableSequence[google.cloud.databasecenter_v1beta.types.ResourceDetails]): + Details of resources that have decreased. 
+ """ + + increased_resources: MutableSequence["ResourceDetails"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ResourceDetails", + ) + decreased_resources: MutableSequence["ResourceDetails"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ResourceDetails", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-databasecenter/samples/generated_samples/databasecenter_v1beta_generated_database_center_aggregate_fleet_async.py b/packages/google-cloud-databasecenter/samples/generated_samples/databasecenter_v1beta_generated_database_center_aggregate_fleet_async.py new file mode 100644 index 000000000000..c23b43da37f9 --- /dev/null +++ b/packages/google-cloud-databasecenter/samples/generated_samples/databasecenter_v1beta_generated_database_center_aggregate_fleet_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregateFleet +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-databasecenter + + +# [START databasecenter_v1beta_generated_DatabaseCenter_AggregateFleet_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import databasecenter_v1beta + + +async def sample_aggregate_fleet(): + # Create a client + client = databasecenter_v1beta.DatabaseCenterAsyncClient() + + # Initialize request argument(s) + request = databasecenter_v1beta.AggregateFleetRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.aggregate_fleet(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END databasecenter_v1beta_generated_DatabaseCenter_AggregateFleet_async] diff --git a/packages/google-cloud-databasecenter/samples/generated_samples/databasecenter_v1beta_generated_database_center_aggregate_fleet_sync.py b/packages/google-cloud-databasecenter/samples/generated_samples/databasecenter_v1beta_generated_database_center_aggregate_fleet_sync.py new file mode 100644 index 000000000000..6336086e0b4f --- /dev/null +++ b/packages/google-cloud-databasecenter/samples/generated_samples/databasecenter_v1beta_generated_database_center_aggregate_fleet_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregateFleet +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-databasecenter + + +# [START databasecenter_v1beta_generated_DatabaseCenter_AggregateFleet_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import databasecenter_v1beta + + +def sample_aggregate_fleet(): + # Create a client + client = databasecenter_v1beta.DatabaseCenterClient() + + # Initialize request argument(s) + request = databasecenter_v1beta.AggregateFleetRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.aggregate_fleet(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END databasecenter_v1beta_generated_DatabaseCenter_AggregateFleet_sync] diff --git a/packages/google-cloud-databasecenter/samples/generated_samples/snippet_metadata_google.cloud.databasecenter.v1beta.json b/packages/google-cloud-databasecenter/samples/generated_samples/snippet_metadata_google.cloud.databasecenter.v1beta.json index 3218cca5b21a..162d29e59d42 100644 --- a/packages/google-cloud-databasecenter/samples/generated_samples/snippet_metadata_google.cloud.databasecenter.v1beta.json +++ b/packages/google-cloud-databasecenter/samples/generated_samples/snippet_metadata_google.cloud.databasecenter.v1beta.json @@ -11,6 +11,159 @@ "version": "0.2.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.databasecenter_v1beta.DatabaseCenterAsyncClient", + "shortName": "DatabaseCenterAsyncClient" + }, + "fullName": "google.cloud.databasecenter_v1beta.DatabaseCenterAsyncClient.aggregate_fleet", + "method": { + "fullName": "google.cloud.databasecenter.v1beta.DatabaseCenter.AggregateFleet", + "service": { + "fullName": "google.cloud.databasecenter.v1beta.DatabaseCenter", + "shortName": "DatabaseCenter" + }, + "shortName": "AggregateFleet" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.databasecenter_v1beta.types.AggregateFleetRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.databasecenter_v1beta.services.database_center.pagers.AggregateFleetAsyncPager", + "shortName": "aggregate_fleet" + }, + "description": "Sample for AggregateFleet", + "file": "databasecenter_v1beta_generated_database_center_aggregate_fleet_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "databasecenter_v1beta_generated_DatabaseCenter_AggregateFleet_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "databasecenter_v1beta_generated_database_center_aggregate_fleet_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.databasecenter_v1beta.DatabaseCenterClient", + "shortName": "DatabaseCenterClient" + }, + "fullName": "google.cloud.databasecenter_v1beta.DatabaseCenterClient.aggregate_fleet", + "method": { + "fullName": "google.cloud.databasecenter.v1beta.DatabaseCenter.AggregateFleet", + "service": { + "fullName": "google.cloud.databasecenter.v1beta.DatabaseCenter", + "shortName": "DatabaseCenter" + }, + "shortName": "AggregateFleet" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.databasecenter_v1beta.types.AggregateFleetRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.databasecenter_v1beta.services.database_center.pagers.AggregateFleetPager", + "shortName": "aggregate_fleet" + }, + "description": "Sample for AggregateFleet", + "file": "databasecenter_v1beta_generated_database_center_aggregate_fleet_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "databasecenter_v1beta_generated_DatabaseCenter_AggregateFleet_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "databasecenter_v1beta_generated_database_center_aggregate_fleet_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-databasecenter/tests/unit/gapic/databasecenter_v1beta/test_database_center.py b/packages/google-cloud-databasecenter/tests/unit/gapic/databasecenter_v1beta/test_database_center.py index b306880cc624..73c7688b59be 100644 --- a/packages/google-cloud-databasecenter/tests/unit/gapic/databasecenter_v1beta/test_database_center.py +++ b/packages/google-cloud-databasecenter/tests/unit/gapic/databasecenter_v1beta/test_database_center.py @@ -51,6 +51,7 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.type import date_pb2 # type: ignore from google.cloud.databasecenter_v1beta.services.database_center import ( DatabaseCenterAsyncClient, @@ -1683,6 +1684,397 @@ async def test_query_products_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + service.AggregateFleetRequest, + dict, + ], +) +def test_aggregate_fleet(request_type, transport: str = "grpc"): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.AggregateFleetResponse( + resource_groups_total_count=2930, + resource_total_count=2163, + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.aggregate_fleet(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.AggregateFleetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregateFleetPager) + assert response.resource_groups_total_count == 2930 + assert response.resource_total_count == 2163 + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_aggregate_fleet_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.AggregateFleetRequest( + parent="parent_value", + filter="filter_value", + group_by="group_by_value", + order_by="order_by_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.aggregate_fleet(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.AggregateFleetRequest( + parent="parent_value", + filter="filter_value", + group_by="group_by_value", + order_by="order_by_value", + page_token="page_token_value", + ) + + +def test_aggregate_fleet_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregate_fleet in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregate_fleet] = mock_rpc + request = {} + client.aggregate_fleet(request) + + # Establish that the underlying gRPC stub method was called. 
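+        # The second call below must reuse the cached wrapper rather than
+        # re-wrapping the method, so wrapper_fn is not invoked again.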
+ assert mock_rpc.call_count == 1 + + client.aggregate_fleet(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_aggregate_fleet_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseCenterAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.aggregate_fleet + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.aggregate_fleet + ] = mock_rpc + + request = {} + await client.aggregate_fleet(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.aggregate_fleet(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_aggregate_fleet_async( + transport: str = "grpc_asyncio", request_type=service.AggregateFleetRequest +): + client = DatabaseCenterAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.AggregateFleetResponse( + resource_groups_total_count=2930, + resource_total_count=2163, + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.aggregate_fleet(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.AggregateFleetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregateFleetAsyncPager) + assert response.resource_groups_total_count == 2930 + assert response.resource_total_count == 2163 + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_aggregate_fleet_async_from_dict(): + await test_aggregate_fleet_async(request_type=dict) + + +def test_aggregate_fleet_pager(transport_name: str = "grpc"): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + # Set the response to a series of pages. 
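+        # Four fake pages (3 + 0 + 1 + 2 rows); the page tokens chain
+        # "abc" -> "def" -> "ghi" -> "" so the pager stops after page four.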
+ call.side_effect = ( + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + next_page_token="abc", + ), + service.AggregateFleetResponse( + rows=[], + next_page_token="def", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + ], + next_page_token="ghi", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.aggregate_fleet(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service.AggregateFleetRow) for i in results) + + +def test_aggregate_fleet_pages(transport_name: str = "grpc"): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + next_page_token="abc", + ), + service.AggregateFleetResponse( + rows=[], + next_page_token="def", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + ], + next_page_token="ghi", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + ), + RuntimeError, + ) + pages = list(client.aggregate_fleet(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_aggregate_fleet_async_pager(): + client = DatabaseCenterAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_fleet), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + next_page_token="abc", + ), + service.AggregateFleetResponse( + rows=[], + next_page_token="def", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + ], + next_page_token="ghi", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + ), + RuntimeError, + ) + async_pager = await client.aggregate_fleet( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, service.AggregateFleetRow) for i in responses) + + +@pytest.mark.asyncio +async def test_aggregate_fleet_async_pages(): + client = DatabaseCenterAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.aggregate_fleet), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + next_page_token="abc", + ), + service.AggregateFleetResponse( + rows=[], + next_page_token="def", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + ], + next_page_token="ghi", + ), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.aggregate_fleet(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -2057,36 +2449,241 @@ async def test_query_database_resource_groups_async_pages(): ], next_page_token="abc", ), - service.QueryDatabaseResourceGroupsResponse( - resource_groups=[], + service.QueryDatabaseResourceGroupsResponse( + resource_groups=[], + next_page_token="def", + ), + service.QueryDatabaseResourceGroupsResponse( + resource_groups=[ + service.DatabaseResourceGroup(), + ], + next_page_token="ghi", + ), + service.QueryDatabaseResourceGroupsResponse( + resource_groups=[ + service.DatabaseResourceGroup(), + service.DatabaseResourceGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.query_database_resource_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_query_products_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_products in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.query_products] = mock_rpc + + request = {} + client.query_products(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_products(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_query_products_rest_required_fields(request_type=service.QueryProductsRequest): + transport_class = transports.DatabaseCenterRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "parent" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_products._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "parent" in jsonified_request + assert jsonified_request["parent"] == request_init["parent"] + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_products._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "parent", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.QueryProductsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.QueryProductsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.query_products(request) + + expected_params = [ + ( + "parent", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_query_products_rest_unset_required_fields(): + transport = transports.DatabaseCenterRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.query_products._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "parent", + ) + ) + & set(("parent",)) + ) + + +def test_query_products_rest_pager(transport: str = "rest"): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.QueryProductsResponse( + products=[ + product.Product(), + product.Product(), + product.Product(), + ], + next_page_token="abc", + ), + service.QueryProductsResponse( + products=[], next_page_token="def", ), - service.QueryDatabaseResourceGroupsResponse( - resource_groups=[ - service.DatabaseResourceGroup(), + service.QueryProductsResponse( + products=[ + product.Product(), ], next_page_token="ghi", ), - service.QueryDatabaseResourceGroupsResponse( - resource_groups=[ - service.DatabaseResourceGroup(), - service.DatabaseResourceGroup(), + service.QueryProductsResponse( + products=[ + product.Product(), + product.Product(), ], ), - RuntimeError, ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.query_database_resource_groups(request={}) - ).pages: - pages.append(page_) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.QueryProductsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.query_products(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, product.Product) for i in results) + + pages = list(client.query_products(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_query_products_rest_use_cached_wrapped_rpc(): +def 
test_aggregate_fleet_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2100,29 +2697,31 @@ def test_query_products_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.query_products in client._transport._wrapped_methods + assert client._transport.aggregate_fleet in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.query_products] = mock_rpc + client._transport._wrapped_methods[client._transport.aggregate_fleet] = mock_rpc request = {} - client.query_products(request) + client.aggregate_fleet(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_products(request) + client.aggregate_fleet(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_query_products_rest_required_fields(request_type=service.QueryProductsRequest): +def test_aggregate_fleet_rest_required_fields( + request_type=service.AggregateFleetRequest, +): transport_class = transports.DatabaseCenterRestTransport request_init = {} @@ -2138,7 +2737,7 @@ def test_query_products_rest_required_fields(request_type=service.QueryProductsR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_products._get_unset_required_fields(jsonified_request) + ).aggregate_fleet._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2149,10 +2748,14 @@ def test_query_products_rest_required_fields(request_type=service.QueryProductsR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_products._get_unset_required_fields(jsonified_request) + ).aggregate_fleet._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( + "baseline_date", + "filter", + "group_by", + "order_by", "page_size", "page_token", "parent", @@ -2171,7 +2774,7 @@ def test_query_products_rest_required_fields(request_type=service.QueryProductsR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.QueryProductsResponse() + return_value = service.AggregateFleetResponse() # Mock the http request call within the method and fake a response. 
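    # The mocked 200 response lets the call complete so the test can assert
    # the query params that were actually sent (required "parent" plus "$alt").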
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2192,14 +2795,14 @@ def test_query_products_rest_required_fields(request_type=service.QueryProductsR response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.QueryProductsResponse.pb(return_value) + return_value = service.AggregateFleetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.query_products(request) + response = client.aggregate_fleet(request) expected_params = [ ( @@ -2212,15 +2815,19 @@ def test_query_products_rest_required_fields(request_type=service.QueryProductsR assert expected_params == actual_params -def test_query_products_rest_unset_required_fields(): +def test_aggregate_fleet_rest_unset_required_fields(): transport = transports.DatabaseCenterRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.query_products._get_unset_required_fields({}) + unset_fields = transport.aggregate_fleet._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( + "baselineDate", + "filter", + "groupBy", + "orderBy", "pageSize", "pageToken", "parent", @@ -2230,7 +2837,7 @@ def test_query_products_rest_unset_required_fields(): ) -def test_query_products_rest_pager(transport: str = "rest"): +def test_aggregate_fleet_rest_pager(transport: str = "rest"): client = DatabaseCenterClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2242,28 +2849,28 @@ def test_query_products_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - service.QueryProductsResponse( - products=[ - product.Product(), - product.Product(), - product.Product(), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), + service.AggregateFleetRow(), ], next_page_token="abc", ), - service.QueryProductsResponse( - products=[], + service.AggregateFleetResponse( + rows=[], next_page_token="def", ), - service.QueryProductsResponse( - products=[ - product.Product(), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), ], next_page_token="ghi", ), - service.QueryProductsResponse( - products=[ - product.Product(), - product.Product(), + service.AggregateFleetResponse( + rows=[ + service.AggregateFleetRow(), + service.AggregateFleetRow(), ], ), ) @@ -2271,7 +2878,7 @@ def test_query_products_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(service.QueryProductsResponse.to_json(x) for x in response) + response = tuple(service.AggregateFleetResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -2280,13 +2887,13 @@ def test_query_products_rest_pager(transport: str = "rest"): sample_request = {} - pager = client.query_products(request=sample_request) + pager = client.aggregate_fleet(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, product.Product) for i in results) + assert all(isinstance(i, service.AggregateFleetRow) for i in results) - 
pages = list(client.query_products(request=sample_request).pages) + pages = list(client.aggregate_fleet(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2611,6 +3218,27 @@ def test_query_products_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_aggregate_fleet_empty_call_grpc(): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + call.return_value = service.AggregateFleetResponse() + client.aggregate_fleet(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.AggregateFleetRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_query_database_resource_groups_empty_call_grpc(): @@ -2676,6 +3304,36 @@ async def test_query_products_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_aggregate_fleet_empty_call_grpc_asyncio(): + client = DatabaseCenterAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.AggregateFleetResponse( + resource_groups_total_count=2930, + resource_total_count=2163, + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.aggregate_fleet(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.AggregateFleetRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -2840,6 +3498,137 @@ def test_query_products_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_aggregate_fleet_rest_bad_request(request_type=service.AggregateFleetRequest): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
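# Hedged aside: google.api_core maps HTTP status codes to typed exceptions,
# which is why the test below can assert pytest.raises(core_exceptions.BadRequest)
# after forcing status_code = 400 on the mocked response. An illustration with
# a public api_core helper:
#
#     from google.api_core import exceptions as core_exceptions
#
#     assert core_exceptions.exception_class_for_http_status(400) is (
#         core_exceptions.BadRequest
#     )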
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.aggregate_fleet(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.AggregateFleetRequest, + dict, + ], +) +def test_aggregate_fleet_rest_call_success(request_type): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.AggregateFleetResponse( + resource_groups_total_count=2930, + resource_total_count=2163, + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.AggregateFleetResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.aggregate_fleet(request) + + # Establish that the response is the type that we expect. 
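# Hedged aside: the call-success test below asserts both a pager type and
# scalar response fields because generated pagers keep the first response and
# proxy attribute access to it, while iterating the pager walks the repeated
# ``rows`` field across pages. Illustrative use (the request contents are
# placeholders):
#
#     pager = client.aggregate_fleet(request={})
#     print(pager.next_page_token)   # proxied from the underlying response
#     rows = list(pager)             # fetches subsequent pages on demand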
+ assert isinstance(response, pagers.AggregateFleetPager) + assert response.resource_groups_total_count == 2930 + assert response.resource_total_count == 2163 + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregate_fleet_rest_interceptors(null_interceptor): + transport = transports.DatabaseCenterRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatabaseCenterRestInterceptor(), + ) + client = DatabaseCenterClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatabaseCenterRestInterceptor, "post_aggregate_fleet" + ) as post, mock.patch.object( + transports.DatabaseCenterRestInterceptor, "post_aggregate_fleet_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.DatabaseCenterRestInterceptor, "pre_aggregate_fleet" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.AggregateFleetRequest.pb(service.AggregateFleetRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.AggregateFleetResponse.to_json( + service.AggregateFleetResponse() + ) + req.return_value.content = return_value + + request = service.AggregateFleetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.AggregateFleetResponse() + post_with_metadata.return_value = service.AggregateFleetResponse(), metadata + + client.aggregate_fleet( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_query_database_resource_groups_rest_bad_request( request_type=service.QueryDatabaseResourceGroupsRequest, ): @@ -3002,6 +3791,26 @@ def test_query_products_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_aggregate_fleet_empty_call_rest(): + client = DatabaseCenterClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.aggregate_fleet), "__call__") as call: + client.aggregate_fleet(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.AggregateFleetRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_query_database_resource_groups_empty_call_rest(): @@ -3058,6 +3867,7 @@ def test_database_center_base_transport(): # raise NotImplementedError. 
methods = ( "query_products", + "aggregate_fleet", "query_database_resource_groups", ) for method in methods: @@ -3320,6 +4130,9 @@ def test_database_center_client_transport_session_collision(transport_name): session1 = client1.transport.query_products._session session2 = client2.transport.query_products._session assert session1 != session2 + session1 = client1.transport.aggregate_fleet._session + session2 = client2.transport.aggregate_fleet._session + assert session1 != session2 session1 = client1.transport.query_database_resource_groups._session session2 = client2.transport.query_database_resource_groups._session assert session1 != session2 diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/gapic_metadata.json b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/gapic_metadata.json index 7ab79835a172..3d2027293d05 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/gapic_metadata.json @@ -15,11 +15,21 @@ "create_data_agent" ] }, + "CreateDataAgentSync": { + "methods": [ + "create_data_agent_sync" + ] + }, "DeleteDataAgent": { "methods": [ "delete_data_agent" ] }, + "DeleteDataAgentSync": { + "methods": [ + "delete_data_agent_sync" + ] + }, "GetDataAgent": { "methods": [ "get_data_agent" @@ -49,6 +59,11 @@ "methods": [ "update_data_agent" ] + }, + "UpdateDataAgentSync": { + "methods": [ + "update_data_agent_sync" + ] } } }, @@ -60,11 +75,21 @@ "create_data_agent" ] }, + "CreateDataAgentSync": { + "methods": [ + "create_data_agent_sync" + ] + }, "DeleteDataAgent": { "methods": [ "delete_data_agent" ] }, + "DeleteDataAgentSync": { + "methods": [ + "delete_data_agent_sync" + ] + }, "GetDataAgent": { "methods": [ "get_data_agent" @@ -94,6 +119,11 @@ "methods": [ "update_data_agent" ] + }, + "UpdateDataAgentSync": { + "methods": [ + "update_data_agent_sync" + ] } } }, @@ -105,11 +135,21 @@ "create_data_agent" ] }, + "CreateDataAgentSync": { + "methods": [ + "create_data_agent_sync" + ] + }, "DeleteDataAgent": { "methods": [ "delete_data_agent" ] }, + "DeleteDataAgentSync": { + "methods": [ + "delete_data_agent_sync" + ] + }, "GetDataAgent": { "methods": [ "get_data_agent" @@ -139,6 +179,11 @@ "methods": [ "update_data_agent" ] + }, + "UpdateDataAgentSync": { + "methods": [ + "update_data_agent_sync" + ] } } } diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/async_client.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/async_client.py index 699b384de77b..cc9d583593db 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/async_client.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/async_client.py @@ -813,6 +813,137 @@ async def sample_create_data_agent(): # Done; return the response. 
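# Hedged usage sketch for the new *Sync RPC variants added below. Assuming the
# existing create_data_agent returns a long-running operation (the usual reason
# a service grows a parallel CreateDataAgentSync method), the sync variant
# returns the created DataAgent directly; on this async client it is still
# awaited. Project and location values are placeholders:
#
#     from google.cloud import geminidataanalytics_v1alpha
#
#     async def create_both_ways():
#         client = geminidataanalytics_v1alpha.DataAgentServiceAsyncClient()
#         parent = "projects/my-project/locations/global"
#
#         # LRO flavor: await the operation, then await its result.
#         operation = await client.create_data_agent(request={"parent": parent})
#         agent = await operation.result()
#
#         # Sync-RPC flavor: the server completes the create before replying.
#         agent = await client.create_data_agent_sync(request={"parent": parent})
#         return agent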
return response + async def create_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.CreateDataAgentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + data_agent_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Creates a new DataAgent in a given project and + location synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1alpha + + async def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.geminidataanalytics_v1alpha.types.CreateDataAgentRequest, dict]]): + The request object. Message for creating a DataAgent. + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent (:class:`google.cloud.geminidataanalytics_v1alpha.types.DataAgent`): + Required. The resource being created. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent_id (:class:`str`): + Optional. Id of the requesting object. Must be unique + within the parent. The allowed format is: + ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``. If not provided, + the server will auto-generate a value for the id. + + This corresponds to the ``data_agent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1alpha.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_agent, data_agent_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.CreateDataAgentRequest): + request = data_agent_service.CreateDataAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_agent is not None: + request.data_agent = data_agent + if data_agent_id is not None: + request.data_agent_id = data_agent_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_agent_sync + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def update_data_agent( self, request: Optional[ @@ -956,6 +1087,135 @@ async def sample_update_data_agent(): # Done; return the response. return response + async def update_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.UpdateDataAgentRequest, dict] + ] = None, + *, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Updates the parameters of a single DataAgent + synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1alpha + + async def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.UpdateDataAgentRequest( + ) + + # Make the request + response = await client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.geminidataanalytics_v1alpha.types.UpdateDataAgentRequest, dict]]): + The request object. Message for updating a DataAgent. + data_agent (:class:`google.cloud.geminidataanalytics_v1alpha.types.DataAgent`): + Required. The resource being updated. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the DataAgent resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. 
If the user does not + provide a mask then all fields with non-default values + present in the request will be overwritten. If a + wildcard mask is provided, all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1alpha.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_agent, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.UpdateDataAgentRequest): + request = data_agent_service.UpdateDataAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_agent is not None: + request.data_agent = data_agent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_agent_sync + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_agent.name", request.data_agent.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def delete_data_agent( self, request: Optional[ @@ -1088,6 +1348,104 @@ async def sample_delete_data_agent(): # Done; return the response. return response + async def delete_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.DeleteDataAgentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DataAgent synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1alpha + + async def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_agent_sync(request=request) + + Args: + request (Optional[Union[google.cloud.geminidataanalytics_v1alpha.types.DeleteDataAgentRequest, dict]]): + The request object. Message for deleting a DataAgent. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.DeleteDataAgentRequest): + request = data_agent_service.DeleteDataAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_agent_sync + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
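# Editorial note (hedged): per the transport stubs later in this diff,
# DeleteDataAgentSync deserializes google.protobuf.Empty, so this wrapper
# awaits the RPC purely for its side effect and intentionally returns None,
# matching the ``-> None`` annotation above.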
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def get_iam_policy( self, request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/client.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/client.py index 85ab1c5ec03f..a48b9f3d71de 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/client.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/client.py @@ -1245,6 +1245,134 @@ def sample_create_data_agent(): # Done; return the response. return response + def create_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.CreateDataAgentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + data_agent_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Creates a new DataAgent in a given project and + location synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1alpha + + def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.geminidataanalytics_v1alpha.types.CreateDataAgentRequest, dict]): + The request object. Message for creating a DataAgent. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent (google.cloud.geminidataanalytics_v1alpha.types.DataAgent): + Required. The resource being created. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent_id (str): + Optional. Id of the requesting object. Must be unique + within the parent. The allowed format is: + ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``. If not provided, + the server will auto-generate a value for the id. + + This corresponds to the ``data_agent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1alpha.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_agent, data_agent_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.CreateDataAgentRequest): + request = data_agent_service.CreateDataAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_agent is not None: + request.data_agent = data_agent + if data_agent_id is not None: + request.data_agent_id = data_agent_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_agent_sync] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def update_data_agent( self, request: Optional[ @@ -1385,6 +1513,132 @@ def sample_update_data_agent(): # Done; return the response. return response + def update_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.UpdateDataAgentRequest, dict] + ] = None, + *, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Updates the parameters of a single DataAgent + synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1alpha + + def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.UpdateDataAgentRequest( + ) + + # Make the request + response = client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.geminidataanalytics_v1alpha.types.UpdateDataAgentRequest, dict]): + The request object. Message for updating a DataAgent. + data_agent (google.cloud.geminidataanalytics_v1alpha.types.DataAgent): + Required. The resource being updated. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the DataAgent resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields with non-default values + present in the request will be overwritten. If a + wildcard mask is provided, all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1alpha.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_agent, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.UpdateDataAgentRequest): + request = data_agent_service.UpdateDataAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_agent is not None: + request.data_agent = data_agent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_agent_sync] + + # Certain fields should be provided within the metadata header; + # add these here. 
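# Hedged aside: to_grpc_metadata builds the standard routing header, so the
# tuple appended below is equivalent to adding
# ("x-goog-request-params", "data_agent.name=<resource name>") by hand,
# which lets the backend route the update by the target DataAgent's name.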
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_agent.name", request.data_agent.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def delete_data_agent( self, request: Optional[ @@ -1514,6 +1768,101 @@ def sample_delete_data_agent(): # Done; return the response. return response + def delete_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.DeleteDataAgentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DataAgent synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1alpha + + def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + client.delete_data_agent_sync(request=request) + + Args: + request (Union[google.cloud.geminidataanalytics_v1alpha.types.DeleteDataAgentRequest, dict]): + The request object. Message for deleting a DataAgent. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.DeleteDataAgentRequest): + request = data_agent_service.DeleteDataAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
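# Editorial note (hedged): ``_wrapped_methods`` is populated once in
# ``_prep_wrapped_messages`` (extended for the *_sync methods in
# transports/base.py within this diff), so this lookup reuses the cached
# retry/timeout-wrapped callable instead of re-wrapping on every call;
# that is the behavior the *_use_cached_wrapped_rpc tests assert.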
+ rpc = self._transport._wrapped_methods[self._transport.delete_data_agent_sync] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def get_iam_policy( self, request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/base.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/base.py index c59ea92165c0..3c0cf89f6c39 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/base.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/base.py @@ -28,12 +28,12 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.protobuf import empty_pb2 # type: ignore from google.cloud.geminidataanalytics_v1alpha import gapic_version as package_version -from google.cloud.geminidataanalytics_v1alpha.types import ( - data_agent, - data_agent_service, -) +from google.cloud.geminidataanalytics_v1alpha.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -160,16 +160,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_data_agent_sync: gapic_v1.method.wrap_method( + self.create_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.update_data_agent: gapic_v1.method.wrap_method( self.update_data_agent, default_timeout=None, client_info=client_info, ), + self.update_data_agent_sync: gapic_v1.method.wrap_method( + self.update_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.delete_data_agent: gapic_v1.method.wrap_method( self.delete_data_agent, default_timeout=None, client_info=client_info, ), + self.delete_data_agent_sync: gapic_v1.method.wrap_method( + self.delete_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.get_iam_policy: gapic_v1.method.wrap_method( self.get_iam_policy, default_timeout=None, @@ -268,6 +283,15 @@ def create_data_agent( ]: raise NotImplementedError() + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], + Union[gcg_data_agent.DataAgent, Awaitable[gcg_data_agent.DataAgent]], + ]: + raise NotImplementedError() + @property def update_data_agent( self, @@ -277,6 +301,15 @@ def update_data_agent( ]: raise NotImplementedError() + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], + Union[gcg_data_agent.DataAgent, Awaitable[gcg_data_agent.DataAgent]], + ]: + raise NotImplementedError() + @property def delete_data_agent( self, @@ -286,6 +319,15 @@ def delete_data_agent( ]: raise NotImplementedError() + @property + def 
delete_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.DeleteDataAgentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def get_iam_policy( self, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc.py index ac8f3c094934..8374d94061e0 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc.py @@ -27,15 +27,15 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message import grpc # type: ignore import proto # type: ignore -from google.cloud.geminidataanalytics_v1alpha.types import ( - data_agent, - data_agent_service, -) +from google.cloud.geminidataanalytics_v1alpha.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent_service from .base import DEFAULT_CLIENT_INFO, DataAgentServiceTransport @@ -461,6 +461,35 @@ def create_data_agent( ) return self._stubs["create_data_agent"] + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], gcg_data_agent.DataAgent + ]: + r"""Return a callable for the create data agent sync method over gRPC. + + Creates a new DataAgent in a given project and + location synchronously. + + Returns: + Callable[[~.CreateDataAgentRequest], + ~.DataAgent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_agent_sync" not in self._stubs: + self._stubs["create_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1alpha.DataAgentService/CreateDataAgentSync", + request_serializer=data_agent_service.CreateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["create_data_agent_sync"] + @property def update_data_agent( self, @@ -489,6 +518,35 @@ def update_data_agent( ) return self._stubs["update_data_agent"] + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], gcg_data_agent.DataAgent + ]: + r"""Return a callable for the update data agent sync method over gRPC. + + Updates the parameters of a single DataAgent + synchronously. + + Returns: + Callable[[~.UpdateDataAgentRequest], + ~.DataAgent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
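# Hedged sketch of the lazy-stub idiom repeated for each RPC in this
# transport: create the unary-unary stub once, cache it in self._stubs keyed
# by method name, and return the cached callable thereafter. A generic,
# hypothetical (non-generated) helper capturing the pattern:
#
#     def _cached_unary_unary(self, name, path, serialize, deserialize):
#         if name not in self._stubs:
#             self._stubs[name] = self._logged_channel.unary_unary(
#                 path,
#                 request_serializer=serialize,
#                 response_deserializer=deserialize,
#             )
#         return self._stubs[name]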
+ if "update_data_agent_sync" not in self._stubs: + self._stubs["update_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1alpha.DataAgentService/UpdateDataAgentSync", + request_serializer=data_agent_service.UpdateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["update_data_agent_sync"] + @property def delete_data_agent( self, @@ -517,6 +575,32 @@ def delete_data_agent( ) return self._stubs["delete_data_agent"] + @property + def delete_data_agent_sync( + self, + ) -> Callable[[data_agent_service.DeleteDataAgentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete data agent sync method over gRPC. + + Deletes a single DataAgent synchronously. + + Returns: + Callable[[~.DeleteDataAgentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_agent_sync" not in self._stubs: + self._stubs["delete_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1alpha.DataAgentService/DeleteDataAgentSync", + request_serializer=data_agent_service.DeleteDataAgentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_agent_sync"] + @property def get_iam_policy( self, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc_asyncio.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc_asyncio.py index d5b9dcec69f9..7422b6b98c07 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/grpc_asyncio.py @@ -29,16 +29,16 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message import grpc # type: ignore from grpc.experimental import aio # type: ignore import proto # type: ignore -from google.cloud.geminidataanalytics_v1alpha.types import ( - data_agent, - data_agent_service, -) +from google.cloud.geminidataanalytics_v1alpha.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent_service from .base import DEFAULT_CLIENT_INFO, DataAgentServiceTransport from .grpc import DataAgentServiceGrpcTransport @@ -471,6 +471,35 @@ def create_data_agent( ) return self._stubs["create_data_agent"] + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], Awaitable[gcg_data_agent.DataAgent] + ]: + r"""Return a callable for the create data agent sync method over gRPC. + + Creates a new DataAgent in a given project and + location synchronously. 
+ + Returns: + Callable[[~.CreateDataAgentRequest], + Awaitable[~.DataAgent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_agent_sync" not in self._stubs: + self._stubs["create_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1alpha.DataAgentService/CreateDataAgentSync", + request_serializer=data_agent_service.CreateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["create_data_agent_sync"] + @property def update_data_agent( self, @@ -499,6 +528,35 @@ def update_data_agent( ) return self._stubs["update_data_agent"] + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], Awaitable[gcg_data_agent.DataAgent] + ]: + r"""Return a callable for the update data agent sync method over gRPC. + + Updates the parameters of a single DataAgent + synchronously. + + Returns: + Callable[[~.UpdateDataAgentRequest], + Awaitable[~.DataAgent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_agent_sync" not in self._stubs: + self._stubs["update_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1alpha.DataAgentService/UpdateDataAgentSync", + request_serializer=data_agent_service.UpdateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["update_data_agent_sync"] + @property def delete_data_agent( self, @@ -527,6 +585,34 @@ def delete_data_agent( ) return self._stubs["delete_data_agent"] + @property + def delete_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.DeleteDataAgentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete data agent sync method over gRPC. + + Deletes a single DataAgent synchronously. + + Returns: + Callable[[~.DeleteDataAgentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
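# Editorial note (hedged): the asyncio transport registers the same
# /google.cloud.geminidataanalytics.v1alpha.DataAgentService/DeleteDataAgentSync
# method path and serializers as the blocking gRPC transport; only the
# annotated return type differs (Awaitable[empty_pb2.Empty]), because grpc.aio
# stub calls return awaitables.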
+ if "delete_data_agent_sync" not in self._stubs: + self._stubs["delete_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1alpha.DataAgentService/DeleteDataAgentSync", + request_serializer=data_agent_service.DeleteDataAgentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_agent_sync"] + @property def get_iam_policy( self, @@ -602,16 +688,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_data_agent_sync: self._wrap_method( + self.create_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.update_data_agent: self._wrap_method( self.update_data_agent, default_timeout=None, client_info=client_info, ), + self.update_data_agent_sync: self._wrap_method( + self.update_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.delete_data_agent: self._wrap_method( self.delete_data_agent, default_timeout=None, client_info=client_info, ), + self.delete_data_agent_sync: self._wrap_method( + self.delete_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.get_iam_policy: self._wrap_method( self.get_iam_policy, default_timeout=None, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest.py index 1c4935d6b860..e593ce9ab0b7 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest.py @@ -29,13 +29,13 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import google.protobuf +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format from requests import __version__ as requests_version -from google.cloud.geminidataanalytics_v1alpha.types import ( - data_agent, - data_agent_service, -) +from google.cloud.geminidataanalytics_v1alpha.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .rest_base import _BaseDataAgentServiceRestTransport @@ -87,6 +87,14 @@ def post_create_data_agent(self, response): logging.log(f"Received response: {response}") return response + def pre_create_data_agent_sync(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_agent_sync(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_data_agent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -95,6 +103,10 @@ def post_delete_data_agent(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_data_agent_sync(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_get_data_agent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -143,6 +155,14 @@ def 
post_update_data_agent(self, response): logging.log(f"Received response: {response}") return response + def pre_update_data_agent_sync(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_agent_sync(self, response): + logging.log(f"Received response: {response}") + return response + transport = DataAgentServiceRestTransport(interceptor=MyCustomDataAgentServiceInterceptor()) client = DataAgentServiceClient(transport=transport) @@ -198,6 +218,55 @@ def post_create_data_agent_with_metadata( """ return response, metadata + def pre_create_data_agent_sync( + self, + request: data_agent_service.CreateDataAgentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_agent_service.CreateDataAgentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_data_agent_sync + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataAgentService server. + """ + return request, metadata + + def post_create_data_agent_sync( + self, response: gcg_data_agent.DataAgent + ) -> gcg_data_agent.DataAgent: + """Post-rpc interceptor for create_data_agent_sync + + DEPRECATED. Please use the `post_create_data_agent_sync_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataAgentService server but before + it is returned to user code. This `post_create_data_agent_sync` interceptor runs + before the `post_create_data_agent_sync_with_metadata` interceptor. + """ + return response + + def post_create_data_agent_sync_with_metadata( + self, + response: gcg_data_agent.DataAgent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcg_data_agent.DataAgent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_agent_sync + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataAgentService server but before it is returned to user code. + + We recommend only using this `post_create_data_agent_sync_with_metadata` + interceptor in new development instead of the `post_create_data_agent_sync` interceptor. + When both interceptors are used, this `post_create_data_agent_sync_with_metadata` interceptor runs after the + `post_create_data_agent_sync` interceptor. The (possibly modified) response returned by + `post_create_data_agent_sync` will be passed to + `post_create_data_agent_sync_with_metadata`. + """ + return response, metadata + def pre_delete_data_agent( self, request: data_agent_service.DeleteDataAgentRequest, @@ -247,6 +316,21 @@ def post_delete_data_agent_with_metadata( """ return response, metadata + def pre_delete_data_agent_sync( + self, + request: data_agent_service.DeleteDataAgentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_agent_service.DeleteDataAgentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_data_agent_sync + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataAgentService server. 
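+
+        For example, a subclass could append one extra metadata entry
+        (the header name below is purely illustrative)::
+
+            def pre_delete_data_agent_sync(self, request, metadata):
+                return request, tuple(metadata) + (("x-example-header", "1"),)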
+ """ + return request, metadata + def pre_get_data_agent( self, request: data_agent_service.GetDataAgentRequest, @@ -540,6 +624,55 @@ def post_update_data_agent_with_metadata( """ return response, metadata + def pre_update_data_agent_sync( + self, + request: data_agent_service.UpdateDataAgentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_agent_service.UpdateDataAgentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_data_agent_sync + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataAgentService server. + """ + return request, metadata + + def post_update_data_agent_sync( + self, response: gcg_data_agent.DataAgent + ) -> gcg_data_agent.DataAgent: + """Post-rpc interceptor for update_data_agent_sync + + DEPRECATED. Please use the `post_update_data_agent_sync_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataAgentService server but before + it is returned to user code. This `post_update_data_agent_sync` interceptor runs + before the `post_update_data_agent_sync_with_metadata` interceptor. + """ + return response + + def post_update_data_agent_sync_with_metadata( + self, + response: gcg_data_agent.DataAgent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcg_data_agent.DataAgent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_agent_sync + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataAgentService server but before it is returned to user code. + + We recommend only using this `post_update_data_agent_sync_with_metadata` + interceptor in new development instead of the `post_update_data_agent_sync` interceptor. + When both interceptors are used, this `post_update_data_agent_sync_with_metadata` interceptor runs after the + `post_update_data_agent_sync` interceptor. The (possibly modified) response returned by + `post_update_data_agent_sync` will be passed to + `post_update_data_agent_sync_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -982,6 +1115,161 @@ def __call__( ) return resp + class _CreateDataAgentSync( + _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync, + DataAgentServiceRestStub, + ): + def __hash__(self): + return hash("DataAgentServiceRestTransport.CreateDataAgentSync") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_agent_service.CreateDataAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Call the create data agent sync method over HTTP. + + Args: + request (~.data_agent_service.CreateDataAgentRequest): + The request object. Message for creating a DataAgent. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcg_data_agent.DataAgent: + Message describing a DataAgent + object. + + """ + + http_options = ( + _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_data_agent_sync( + request, metadata + ) + transcoded_request = _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_transcoded_request( + http_options, request + ) + + body = _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.CreateDataAgentSync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "rpcName": "CreateDataAgentSync", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataAgentServiceRestTransport._CreateDataAgentSync._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
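+            # For example, an HTTP 404 from the service is raised as
+            # core_exceptions.NotFound.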
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcg_data_agent.DataAgent() + pb_resp = gcg_data_agent.DataAgent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_data_agent_sync(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_agent_sync_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcg_data_agent.DataAgent.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.create_data_agent_sync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "rpcName": "CreateDataAgentSync", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _DeleteDataAgent( _BaseDataAgentServiceRestTransport._BaseDeleteDataAgent, DataAgentServiceRestStub, @@ -1130,6 +1418,115 @@ def __call__( ) return resp + class _DeleteDataAgentSync( + _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync, + DataAgentServiceRestStub, + ): + def __hash__(self): + return hash("DataAgentServiceRestTransport.DeleteDataAgentSync") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: data_agent_service.DeleteDataAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete data agent sync method over HTTP. + + Args: + request (~.data_agent_service.DeleteDataAgentRequest): + The request object. Message for deleting a DataAgent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
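+
+            This method returns ``None``; a successful DeleteDataAgentSync
+            response has an empty body.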
+ """ + + http_options = ( + _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_data_agent_sync( + request, metadata + ) + transcoded_request = _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.DeleteDataAgentSync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "rpcName": "DeleteDataAgentSync", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataAgentServiceRestTransport._DeleteDataAgentSync._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + class _GetDataAgent( _BaseDataAgentServiceRestTransport._BaseGetDataAgent, DataAgentServiceRestStub ): @@ -2188,6 +2585,161 @@ def __call__( ) return resp + class _UpdateDataAgentSync( + _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync, + DataAgentServiceRestStub, + ): + def __hash__(self): + return hash("DataAgentServiceRestTransport.UpdateDataAgentSync") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_agent_service.UpdateDataAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Call the update data agent sync method over HTTP. + + Args: + request (~.data_agent_service.UpdateDataAgentRequest): + The request object. Message for updating a DataAgent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcg_data_agent.DataAgent: + Message describing a DataAgent + object. 
+ + """ + + http_options = ( + _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_data_agent_sync( + request, metadata + ) + transcoded_request = _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_transcoded_request( + http_options, request + ) + + body = _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.UpdateDataAgentSync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "rpcName": "UpdateDataAgentSync", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataAgentServiceRestTransport._UpdateDataAgentSync._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcg_data_agent.DataAgent() + pb_resp = gcg_data_agent.DataAgent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_agent_sync(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_agent_sync_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcg_data_agent.DataAgent.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.update_data_agent_sync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "rpcName": "UpdateDataAgentSync", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def create_data_agent( self, @@ -2198,6 +2750,16 @@ def create_data_agent( # In C++ this would require a dynamic_cast return self._CreateDataAgent(self._session, self._host, self._interceptor) # type: ignore + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], gcg_data_agent.DataAgent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDataAgentSync(self._session, self._host, self._interceptor) # type: ignore + @property def delete_data_agent( self, @@ -2208,6 +2770,14 @@ def delete_data_agent( # In C++ this would require a dynamic_cast return self._DeleteDataAgent(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_data_agent_sync( + self, + ) -> Callable[[data_agent_service.DeleteDataAgentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataAgentSync(self._session, self._host, self._interceptor) # type: ignore + @property def get_data_agent( self, @@ -2264,6 +2834,16 @@ def update_data_agent( # In C++ this would require a dynamic_cast return self._UpdateDataAgent(self._session, self._host, self._interceptor) # type: ignore + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], gcg_data_agent.DataAgent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataAgentSync(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest_base.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest_base.py index 35a9b9368b2f..92bef3cf3c92 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest_base.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1alpha/services/data_agent_service/transports/rest_base.py @@ -22,12 +22,12 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format -from google.cloud.geminidataanalytics_v1alpha.types import ( - data_agent, - data_agent_service, -) +from google.cloud.geminidataanalytics_v1alpha.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent +from google.cloud.geminidataanalytics_v1alpha.types import data_agent_service from .base import DEFAULT_CLIENT_INFO, DataAgentServiceTransport @@ -151,6 +151,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateDataAgentSync: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/dataAgents:createSync", + "body": "data_agent", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = data_agent_service.CreateDataAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteDataAgent: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -198,6 +255,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteDataAgentSync: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/dataAgents/*}:deleteSync", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_agent_service.DeleteDataAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetDataAgent: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -510,6 +614,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateDataAgentSync: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{data_agent.name=projects/*/locations/*/dataAgents/*}:updateSync", + "body": "data_agent", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_agent_service.UpdateDataAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + 
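+            # transcoded_request["body"] carries the data_agent submessage that
+            # the "body" entry in _get_http_options selected for this RPC.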
# Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetLocation: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/gapic_metadata.json b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/gapic_metadata.json index fa2caecf75ee..00fa81d0a123 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/gapic_metadata.json +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/gapic_metadata.json @@ -15,11 +15,21 @@ "create_data_agent" ] }, + "CreateDataAgentSync": { + "methods": [ + "create_data_agent_sync" + ] + }, "DeleteDataAgent": { "methods": [ "delete_data_agent" ] }, + "DeleteDataAgentSync": { + "methods": [ + "delete_data_agent_sync" + ] + }, "GetDataAgent": { "methods": [ "get_data_agent" @@ -49,6 +59,11 @@ "methods": [ "update_data_agent" ] + }, + "UpdateDataAgentSync": { + "methods": [ + "update_data_agent_sync" + ] } } }, @@ -60,11 +75,21 @@ "create_data_agent" ] }, + "CreateDataAgentSync": { + "methods": [ + "create_data_agent_sync" + ] + }, "DeleteDataAgent": { "methods": [ "delete_data_agent" ] }, + "DeleteDataAgentSync": { + "methods": [ + "delete_data_agent_sync" + ] + }, "GetDataAgent": { "methods": [ "get_data_agent" @@ -94,6 +119,11 @@ "methods": [ "update_data_agent" ] + }, + "UpdateDataAgentSync": { + "methods": [ + "update_data_agent_sync" + ] } } }, @@ -105,11 +135,21 @@ "create_data_agent" ] }, + "CreateDataAgentSync": { + "methods": [ + "create_data_agent_sync" + ] + }, "DeleteDataAgent": { "methods": [ "delete_data_agent" ] }, + "DeleteDataAgentSync": { + "methods": [ + "delete_data_agent_sync" + ] + }, "GetDataAgent": { "methods": [ "get_data_agent" @@ -139,6 +179,11 @@ "methods": [ "update_data_agent" ] + }, + "UpdateDataAgentSync": { + "methods": [ + "update_data_agent_sync" + ] } } } diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/async_client.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/async_client.py index af9199fc9a63..40c9e02b569e 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/async_client.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/async_client.py @@ -813,6 +813,137 @@ async def sample_create_data_agent(): # Done; return the response. 
return response + async def create_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.CreateDataAgentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + data_agent_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Creates a new DataAgent in a given project and + location synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1beta + + async def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.geminidataanalytics_v1beta.types.CreateDataAgentRequest, dict]]): + The request object. Message for creating a DataAgent. + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent (:class:`google.cloud.geminidataanalytics_v1beta.types.DataAgent`): + Required. The resource being created. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent_id (:class:`str`): + Optional. Id of the requesting object. Must be unique + within the parent. The allowed format is: + ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``. If not provided, + the server will auto-generate a value for the id. + + This corresponds to the ``data_agent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1beta.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_agent, data_agent_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.CreateDataAgentRequest): + request = data_agent_service.CreateDataAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_agent is not None: + request.data_agent = data_agent + if data_agent_id is not None: + request.data_agent_id = data_agent_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_agent_sync + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def update_data_agent( self, request: Optional[ @@ -956,6 +1087,135 @@ async def sample_update_data_agent(): # Done; return the response. return response + async def update_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.UpdateDataAgentRequest, dict] + ] = None, + *, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Updates the parameters of a single DataAgent + synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1beta + + async def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.UpdateDataAgentRequest( + ) + + # Make the request + response = await client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.geminidataanalytics_v1beta.types.UpdateDataAgentRequest, dict]]): + The request object. Message for updating a DataAgent. + data_agent (:class:`google.cloud.geminidataanalytics_v1beta.types.DataAgent`): + Required. The resource being updated. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the DataAgent resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. 
If the user does not + provide a mask then all fields with non-default values + present in the request will be overwritten. If a + wildcard mask is provided, all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1beta.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_agent, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.UpdateDataAgentRequest): + request = data_agent_service.UpdateDataAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_agent is not None: + request.data_agent = data_agent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_agent_sync + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_agent.name", request.data_agent.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def delete_data_agent( self, request: Optional[ @@ -1088,6 +1348,104 @@ async def sample_delete_data_agent(): # Done; return the response. return response + async def delete_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.DeleteDataAgentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DataAgent synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1beta + + async def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_agent_sync(request=request) + + Args: + request (Optional[Union[google.cloud.geminidataanalytics_v1beta.types.DeleteDataAgentRequest, dict]]): + The request object. Message for deleting a DataAgent. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.DeleteDataAgentRequest): + request = data_agent_service.DeleteDataAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_agent_sync + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
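+        # The awaited call completes only once the server has finished the
+        # synchronous delete; there is no operation to poll and no return value.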
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def get_iam_policy( self, request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/client.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/client.py index da79bd569341..b5643b276713 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/client.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/client.py @@ -1245,6 +1245,134 @@ def sample_create_data_agent(): # Done; return the response. return response + def create_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.CreateDataAgentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + data_agent_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Creates a new DataAgent in a given project and + location synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1beta + + def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.geminidataanalytics_v1beta.types.CreateDataAgentRequest, dict]): + The request object. Message for creating a DataAgent. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent (google.cloud.geminidataanalytics_v1beta.types.DataAgent): + Required. The resource being created. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_agent_id (str): + Optional. Id of the requesting object. Must be unique + within the parent. The allowed format is: + ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``. If not provided, + the server will auto-generate a value for the id. + + This corresponds to the ``data_agent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1beta.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_agent, data_agent_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.CreateDataAgentRequest): + request = data_agent_service.CreateDataAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_agent is not None: + request.data_agent = data_agent + if data_agent_id is not None: + request.data_agent_id = data_agent_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_agent_sync] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def update_data_agent( self, request: Optional[ @@ -1385,6 +1513,132 @@ def sample_update_data_agent(): # Done; return the response. return response + def update_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.UpdateDataAgentRequest, dict] + ] = None, + *, + data_agent: Optional[gcg_data_agent.DataAgent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Updates the parameters of a single DataAgent + synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1beta + + def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.UpdateDataAgentRequest( + ) + + # Make the request + response = client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.geminidataanalytics_v1beta.types.UpdateDataAgentRequest, dict]): + The request object. Message for updating a DataAgent. + data_agent (google.cloud.geminidataanalytics_v1beta.types.DataAgent): + Required. The resource being updated. + This corresponds to the ``data_agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the DataAgent resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields with non-default values + present in the request will be overwritten. If a + wildcard mask is provided, all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.geminidataanalytics_v1beta.types.DataAgent: + Message describing a DataAgent + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_agent, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.UpdateDataAgentRequest): + request = data_agent_service.UpdateDataAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_agent is not None: + request.data_agent = data_agent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_agent_sync] + + # Certain fields should be provided within the metadata header; + # add these here. 
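+        # The routing header is sent as "x-goog-request-params" (for example,
+        # data_agent.name=<full resource name>) so the backend can route the
+        # request correctly.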
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_agent.name", request.data_agent.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def delete_data_agent( self, request: Optional[ @@ -1514,6 +1768,101 @@ def sample_delete_data_agent(): # Done; return the response. return response + def delete_data_agent_sync( + self, + request: Optional[ + Union[data_agent_service.DeleteDataAgentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DataAgent synchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import geminidataanalytics_v1beta + + def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + client.delete_data_agent_sync(request=request) + + Args: + request (Union[google.cloud.geminidataanalytics_v1beta.types.DeleteDataAgentRequest, dict]): + The request object. Message for deleting a DataAgent. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_agent_service.DeleteDataAgentRequest): + request = data_agent_service.DeleteDataAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
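+        # _wrapped_methods was populated by _prep_wrapped_messages at transport
+        # setup, so the per-method default timeout and client_info apply here.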
+ rpc = self._transport._wrapped_methods[self._transport.delete_data_agent_sync] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def get_iam_policy( self, request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/base.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/base.py index 1b1cc6762aae..f8df1736328b 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/base.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/base.py @@ -28,9 +28,12 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.protobuf import empty_pb2 # type: ignore from google.cloud.geminidataanalytics_v1beta import gapic_version as package_version -from google.cloud.geminidataanalytics_v1beta.types import data_agent, data_agent_service +from google.cloud.geminidataanalytics_v1beta.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -157,16 +160,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_data_agent_sync: gapic_v1.method.wrap_method( + self.create_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.update_data_agent: gapic_v1.method.wrap_method( self.update_data_agent, default_timeout=None, client_info=client_info, ), + self.update_data_agent_sync: gapic_v1.method.wrap_method( + self.update_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.delete_data_agent: gapic_v1.method.wrap_method( self.delete_data_agent, default_timeout=None, client_info=client_info, ), + self.delete_data_agent_sync: gapic_v1.method.wrap_method( + self.delete_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.get_iam_policy: gapic_v1.method.wrap_method( self.get_iam_policy, default_timeout=None, @@ -265,6 +283,15 @@ def create_data_agent( ]: raise NotImplementedError() + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], + Union[gcg_data_agent.DataAgent, Awaitable[gcg_data_agent.DataAgent]], + ]: + raise NotImplementedError() + @property def update_data_agent( self, @@ -274,6 +301,15 @@ def update_data_agent( ]: raise NotImplementedError() + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], + Union[gcg_data_agent.DataAgent, Awaitable[gcg_data_agent.DataAgent]], + ]: + raise NotImplementedError() + @property def delete_data_agent( self, @@ -283,6 +319,15 @@ def delete_data_agent( ]: raise NotImplementedError() + @property + def delete_data_agent_sync( 
+ self, + ) -> Callable[ + [data_agent_service.DeleteDataAgentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def get_iam_policy( self, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc.py index 1a1994dbf512..ade5a9abc6dc 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc.py @@ -27,12 +27,15 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message import grpc # type: ignore import proto # type: ignore -from google.cloud.geminidataanalytics_v1beta.types import data_agent, data_agent_service +from google.cloud.geminidataanalytics_v1beta.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent_service from .base import DEFAULT_CLIENT_INFO, DataAgentServiceTransport @@ -458,6 +461,35 @@ def create_data_agent( ) return self._stubs["create_data_agent"] + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], gcg_data_agent.DataAgent + ]: + r"""Return a callable for the create data agent sync method over gRPC. + + Creates a new DataAgent in a given project and + location synchronously. + + Returns: + Callable[[~.CreateDataAgentRequest], + ~.DataAgent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_agent_sync" not in self._stubs: + self._stubs["create_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1beta.DataAgentService/CreateDataAgentSync", + request_serializer=data_agent_service.CreateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["create_data_agent_sync"] + @property def update_data_agent( self, @@ -486,6 +518,35 @@ def update_data_agent( ) return self._stubs["update_data_agent"] + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], gcg_data_agent.DataAgent + ]: + r"""Return a callable for the update data agent sync method over gRPC. + + Updates the parameters of a single DataAgent + synchronously. + + Returns: + Callable[[~.UpdateDataAgentRequest], + ~.DataAgent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
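
The `*_sync` RPC variants threaded through this diff bypass the long-running-operation flow: the existing `create_data_agent` returns an `Operation` to poll, while `create_data_agent_sync` hands back the `DataAgent` directly. A minimal sketch of the difference at the client level (resource names are placeholders, error handling omitted):

```python
# Sketch only: contrasts the existing LRO variant with the new sync variant.
from google.cloud import geminidataanalytics_v1beta

client = geminidataanalytics_v1beta.DataAgentServiceClient()
request = geminidataanalytics_v1beta.CreateDataAgentRequest(
    parent="projects/my-project/locations/global",  # placeholder
)

# Existing LRO variant: returns an Operation; block on .result() for the agent.
operation = client.create_data_agent(request=request)
agent = operation.result()

# New sync variant added in this diff: the DataAgent comes back directly.
agent = client.create_data_agent_sync(request=request)
```
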
+ if "update_data_agent_sync" not in self._stubs: + self._stubs["update_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1beta.DataAgentService/UpdateDataAgentSync", + request_serializer=data_agent_service.UpdateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["update_data_agent_sync"] + @property def delete_data_agent( self, @@ -514,6 +575,32 @@ def delete_data_agent( ) return self._stubs["delete_data_agent"] + @property + def delete_data_agent_sync( + self, + ) -> Callable[[data_agent_service.DeleteDataAgentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete data agent sync method over gRPC. + + Deletes a single DataAgent synchronously. + + Returns: + Callable[[~.DeleteDataAgentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_agent_sync" not in self._stubs: + self._stubs["delete_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1beta.DataAgentService/DeleteDataAgentSync", + request_serializer=data_agent_service.DeleteDataAgentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_agent_sync"] + @property def get_iam_policy( self, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc_asyncio.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc_asyncio.py index 240951b913c5..bea89f44fd66 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/grpc_asyncio.py @@ -29,13 +29,16 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message import grpc # type: ignore from grpc.experimental import aio # type: ignore import proto # type: ignore -from google.cloud.geminidataanalytics_v1beta.types import data_agent, data_agent_service +from google.cloud.geminidataanalytics_v1beta.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent_service from .base import DEFAULT_CLIENT_INFO, DataAgentServiceTransport from .grpc import DataAgentServiceGrpcTransport @@ -468,6 +471,35 @@ def create_data_agent( ) return self._stubs["create_data_agent"] + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], Awaitable[gcg_data_agent.DataAgent] + ]: + r"""Return a callable for the create data agent sync method over gRPC. + + Creates a new DataAgent in a given project and + location synchronously. 
+ + Returns: + Callable[[~.CreateDataAgentRequest], + Awaitable[~.DataAgent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_agent_sync" not in self._stubs: + self._stubs["create_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1beta.DataAgentService/CreateDataAgentSync", + request_serializer=data_agent_service.CreateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["create_data_agent_sync"] + @property def update_data_agent( self, @@ -496,6 +528,35 @@ def update_data_agent( ) return self._stubs["update_data_agent"] + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], Awaitable[gcg_data_agent.DataAgent] + ]: + r"""Return a callable for the update data agent sync method over gRPC. + + Updates the parameters of a single DataAgent + synchronously. + + Returns: + Callable[[~.UpdateDataAgentRequest], + Awaitable[~.DataAgent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_agent_sync" not in self._stubs: + self._stubs["update_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1beta.DataAgentService/UpdateDataAgentSync", + request_serializer=data_agent_service.UpdateDataAgentRequest.serialize, + response_deserializer=gcg_data_agent.DataAgent.deserialize, + ) + return self._stubs["update_data_agent_sync"] + @property def delete_data_agent( self, @@ -524,6 +585,34 @@ def delete_data_agent( ) return self._stubs["delete_data_agent"] + @property + def delete_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.DeleteDataAgentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete data agent sync method over gRPC. + + Deletes a single DataAgent synchronously. + + Returns: + Callable[[~.DeleteDataAgentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
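
Both gRPC transports build their stubs lazily: each property creates the `unary_unary` callable on first access and memoizes it in `self._stubs`, so RPCs that are never used never touch the channel. A stripped-down sketch of that pattern, with `None` standing in for the generated (de)serializers:

```python
# Sketch of the lazy stub-caching pattern used by the generated transports.
import grpc


class LazyStubTransport:
    def __init__(self, channel: grpc.Channel) -> None:
        self._channel = channel
        self._stubs: dict = {}

    @property
    def delete_data_agent_sync(self):
        # Create the callable on first access, then reuse the cached one.
        if "delete_data_agent_sync" not in self._stubs:
            self._stubs["delete_data_agent_sync"] = self._channel.unary_unary(
                "/google.cloud.geminidataanalytics.v1beta.DataAgentService/DeleteDataAgentSync",
                request_serializer=None,   # real code: request type's serialize
                response_deserializer=None,  # real code: Empty.FromString
            )
        return self._stubs["delete_data_agent_sync"]


transport = LazyStubTransport(grpc.insecure_channel("localhost:50051"))
assert transport.delete_data_agent_sync is transport.delete_data_agent_sync  # cached
```
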
+ if "delete_data_agent_sync" not in self._stubs: + self._stubs["delete_data_agent_sync"] = self._logged_channel.unary_unary( + "/google.cloud.geminidataanalytics.v1beta.DataAgentService/DeleteDataAgentSync", + request_serializer=data_agent_service.DeleteDataAgentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_agent_sync"] + @property def get_iam_policy( self, @@ -599,16 +688,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_data_agent_sync: self._wrap_method( + self.create_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.update_data_agent: self._wrap_method( self.update_data_agent, default_timeout=None, client_info=client_info, ), + self.update_data_agent_sync: self._wrap_method( + self.update_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.delete_data_agent: self._wrap_method( self.delete_data_agent, default_timeout=None, client_info=client_info, ), + self.delete_data_agent_sync: self._wrap_method( + self.delete_data_agent_sync, + default_timeout=None, + client_info=client_info, + ), self.get_iam_policy: self._wrap_method( self.get_iam_policy, default_timeout=None, diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest.py index 2d1dd8e948e2..007694e8f7c7 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest.py @@ -29,10 +29,13 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import google.protobuf +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format from requests import __version__ as requests_version -from google.cloud.geminidataanalytics_v1beta.types import data_agent, data_agent_service +from google.cloud.geminidataanalytics_v1beta.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .rest_base import _BaseDataAgentServiceRestTransport @@ -84,6 +87,14 @@ def post_create_data_agent(self, response): logging.log(f"Received response: {response}") return response + def pre_create_data_agent_sync(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_agent_sync(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_data_agent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -92,6 +103,10 @@ def post_delete_data_agent(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_data_agent_sync(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_get_data_agent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -140,6 +155,14 @@ def post_update_data_agent(self, response): 
logging.log(f"Received response: {response}") return response + def pre_update_data_agent_sync(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_agent_sync(self, response): + logging.log(f"Received response: {response}") + return response + transport = DataAgentServiceRestTransport(interceptor=MyCustomDataAgentServiceInterceptor()) client = DataAgentServiceClient(transport=transport) @@ -195,6 +218,55 @@ def post_create_data_agent_with_metadata( """ return response, metadata + def pre_create_data_agent_sync( + self, + request: data_agent_service.CreateDataAgentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_agent_service.CreateDataAgentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_data_agent_sync + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataAgentService server. + """ + return request, metadata + + def post_create_data_agent_sync( + self, response: gcg_data_agent.DataAgent + ) -> gcg_data_agent.DataAgent: + """Post-rpc interceptor for create_data_agent_sync + + DEPRECATED. Please use the `post_create_data_agent_sync_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataAgentService server but before + it is returned to user code. This `post_create_data_agent_sync` interceptor runs + before the `post_create_data_agent_sync_with_metadata` interceptor. + """ + return response + + def post_create_data_agent_sync_with_metadata( + self, + response: gcg_data_agent.DataAgent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcg_data_agent.DataAgent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_agent_sync + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataAgentService server but before it is returned to user code. + + We recommend only using this `post_create_data_agent_sync_with_metadata` + interceptor in new development instead of the `post_create_data_agent_sync` interceptor. + When both interceptors are used, this `post_create_data_agent_sync_with_metadata` interceptor runs after the + `post_create_data_agent_sync` interceptor. The (possibly modified) response returned by + `post_create_data_agent_sync` will be passed to + `post_create_data_agent_sync_with_metadata`. + """ + return response, metadata + def pre_delete_data_agent( self, request: data_agent_service.DeleteDataAgentRequest, @@ -244,6 +316,21 @@ def post_delete_data_agent_with_metadata( """ return response, metadata + def pre_delete_data_agent_sync( + self, + request: data_agent_service.DeleteDataAgentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_agent_service.DeleteDataAgentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_data_agent_sync + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataAgentService server. 
+ """ + return request, metadata + def pre_get_data_agent( self, request: data_agent_service.GetDataAgentRequest, @@ -537,6 +624,55 @@ def post_update_data_agent_with_metadata( """ return response, metadata + def pre_update_data_agent_sync( + self, + request: data_agent_service.UpdateDataAgentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_agent_service.UpdateDataAgentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_data_agent_sync + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataAgentService server. + """ + return request, metadata + + def post_update_data_agent_sync( + self, response: gcg_data_agent.DataAgent + ) -> gcg_data_agent.DataAgent: + """Post-rpc interceptor for update_data_agent_sync + + DEPRECATED. Please use the `post_update_data_agent_sync_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataAgentService server but before + it is returned to user code. This `post_update_data_agent_sync` interceptor runs + before the `post_update_data_agent_sync_with_metadata` interceptor. + """ + return response + + def post_update_data_agent_sync_with_metadata( + self, + response: gcg_data_agent.DataAgent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcg_data_agent.DataAgent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_agent_sync + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataAgentService server but before it is returned to user code. + + We recommend only using this `post_update_data_agent_sync_with_metadata` + interceptor in new development instead of the `post_update_data_agent_sync` interceptor. + When both interceptors are used, this `post_update_data_agent_sync_with_metadata` interceptor runs after the + `post_update_data_agent_sync` interceptor. The (possibly modified) response returned by + `post_update_data_agent_sync` will be passed to + `post_update_data_agent_sync_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -979,6 +1115,161 @@ def __call__( ) return resp + class _CreateDataAgentSync( + _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync, + DataAgentServiceRestStub, + ): + def __hash__(self): + return hash("DataAgentServiceRestTransport.CreateDataAgentSync") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_agent_service.CreateDataAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Call the create data agent sync method over HTTP. + + Args: + request (~.data_agent_service.CreateDataAgentRequest): + The request object. Message for creating a DataAgent. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcg_data_agent.DataAgent: + Message describing a DataAgent + object. + + """ + + http_options = ( + _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_data_agent_sync( + request, metadata + ) + transcoded_request = _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_transcoded_request( + http_options, request + ) + + body = _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.CreateDataAgentSync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "rpcName": "CreateDataAgentSync", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataAgentServiceRestTransport._CreateDataAgentSync._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
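
The pre/post hooks above are the supported customization point for the REST transport. A hedged sketch of wiring them up, following the `MyCustomDataAgentServiceInterceptor` pattern shown in the class docstring earlier (the prints are illustrative, and constructing the transport requires default credentials):

```python
# Sketch: observing the new sync RPCs via the REST interceptor hooks.
from google.cloud.geminidataanalytics_v1beta.services.data_agent_service import (
    DataAgentServiceClient,
    transports,
)


class AuditInterceptor(transports.DataAgentServiceRestInterceptor):
    def pre_create_data_agent_sync(self, request, metadata):
        print(f"CreateDataAgentSync -> {request.parent}")
        return request, metadata

    def post_create_data_agent_sync_with_metadata(self, response, metadata):
        # Runs after post_create_data_agent_sync, per the ordering documented above.
        print(f"created {response.name}")
        return response, metadata


transport = transports.DataAgentServiceRestTransport(interceptor=AuditInterceptor())
client = DataAgentServiceClient(transport=transport)
```
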
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcg_data_agent.DataAgent() + pb_resp = gcg_data_agent.DataAgent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_data_agent_sync(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_agent_sync_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcg_data_agent.DataAgent.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.create_data_agent_sync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "rpcName": "CreateDataAgentSync", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _DeleteDataAgent( _BaseDataAgentServiceRestTransport._BaseDeleteDataAgent, DataAgentServiceRestStub, @@ -1127,6 +1418,115 @@ def __call__( ) return resp + class _DeleteDataAgentSync( + _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync, + DataAgentServiceRestStub, + ): + def __hash__(self): + return hash("DataAgentServiceRestTransport.DeleteDataAgentSync") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: data_agent_service.DeleteDataAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete data agent sync method over HTTP. + + Args: + request (~.data_agent_service.DeleteDataAgentRequest): + The request object. Message for deleting a DataAgent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
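
Each REST `__call__` above turns the raw HTTP payload back into a proto-plus message by parsing JSON into the underlying protobuf. A self-contained sketch of that step; `ignore_unknown_fields=True` is what keeps an older client tolerant of fields added by newer servers:

```python
# Sketch of the response-parsing step used by the REST transport above.
from google.protobuf import json_format

from google.cloud.geminidataanalytics_v1beta.types import data_agent

raw = b'{"name": "projects/p/locations/l/dataAgents/a", "someNewerField": 1}'
resp = data_agent.DataAgent()
# Parse into the wrapped protobuf; fields this client version does not know
# about are ignored instead of raising ParseError.
json_format.Parse(raw, data_agent.DataAgent.pb(resp), ignore_unknown_fields=True)
print(resp.name)  # projects/p/locations/l/dataAgents/a
```
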
+ """ + + http_options = ( + _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_data_agent_sync( + request, metadata + ) + transcoded_request = _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.DeleteDataAgentSync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "rpcName": "DeleteDataAgentSync", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataAgentServiceRestTransport._DeleteDataAgentSync._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + class _GetDataAgent( _BaseDataAgentServiceRestTransport._BaseGetDataAgent, DataAgentServiceRestStub ): @@ -2185,6 +2585,161 @@ def __call__( ) return resp + class _UpdateDataAgentSync( + _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync, + DataAgentServiceRestStub, + ): + def __hash__(self): + return hash("DataAgentServiceRestTransport.UpdateDataAgentSync") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_agent_service.UpdateDataAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcg_data_agent.DataAgent: + r"""Call the update data agent sync method over HTTP. + + Args: + request (~.data_agent_service.UpdateDataAgentRequest): + The request object. Message for updating a DataAgent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcg_data_agent.DataAgent: + Message describing a DataAgent + object. 
+ + """ + + http_options = ( + _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_data_agent_sync( + request, metadata + ) + transcoded_request = _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_transcoded_request( + http_options, request + ) + + body = _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.UpdateDataAgentSync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "rpcName": "UpdateDataAgentSync", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataAgentServiceRestTransport._UpdateDataAgentSync._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcg_data_agent.DataAgent() + pb_resp = gcg_data_agent.DataAgent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_agent_sync(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_agent_sync_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcg_data_agent.DataAgent.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.update_data_agent_sync", + extra={ + "serviceName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "rpcName": "UpdateDataAgentSync", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def create_data_agent( self, @@ -2195,6 +2750,16 @@ def create_data_agent( # In C++ this would require a dynamic_cast return self._CreateDataAgent(self._session, self._host, self._interceptor) # type: ignore + @property + def create_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.CreateDataAgentRequest], gcg_data_agent.DataAgent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDataAgentSync(self._session, self._host, self._interceptor) # type: ignore + @property def delete_data_agent( self, @@ -2205,6 +2770,14 @@ def delete_data_agent( # In C++ this would require a dynamic_cast return self._DeleteDataAgent(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_data_agent_sync( + self, + ) -> Callable[[data_agent_service.DeleteDataAgentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataAgentSync(self._session, self._host, self._interceptor) # type: ignore + @property def get_data_agent( self, @@ -2261,6 +2834,16 @@ def update_data_agent( # In C++ this would require a dynamic_cast return self._UpdateDataAgent(self._session, self._host, self._interceptor) # type: ignore + @property + def update_data_agent_sync( + self, + ) -> Callable[ + [data_agent_service.UpdateDataAgentRequest], gcg_data_agent.DataAgent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataAgentSync(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest_base.py b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest_base.py index 88e5b78b1b80..85131c14d035 100644 --- a/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest_base.py +++ b/packages/google-cloud-geminidataanalytics/google/cloud/geminidataanalytics_v1beta/services/data_agent_service/transports/rest_base.py @@ -22,9 +22,12 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format -from google.cloud.geminidataanalytics_v1beta.types import data_agent, data_agent_service +from google.cloud.geminidataanalytics_v1beta.types import data_agent as gcg_data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent +from google.cloud.geminidataanalytics_v1beta.types import data_agent_service from .base import DEFAULT_CLIENT_INFO, DataAgentServiceTransport @@ -148,6 +151,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateDataAgentSync: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/dataAgents:createSync", + "body": "data_agent", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + 
pb_request = data_agent_service.CreateDataAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataAgentServiceRestTransport._BaseCreateDataAgentSync._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteDataAgent: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -195,6 +255,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteDataAgentSync: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/dataAgents/*}:deleteSync", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_agent_service.DeleteDataAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataAgentServiceRestTransport._BaseDeleteDataAgentSync._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetDataAgent: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -507,6 +614,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateDataAgentSync: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{data_agent.name=projects/*/locations/*/dataAgents/*}:updateSync", + "body": "data_agent", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_agent_service.UpdateDataAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataAgentServiceRestTransport._BaseUpdateDataAgentSync._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetLocation: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_async.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_async.py new file mode 100644 index 000000000000..1ab0331462ee --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1alpha_generated_DataAgentService_CreateDataAgentSync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
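
The `_get_http_options`/`_get_transcoded_request` pairs in `rest_base.py` map each request onto its custom-method URI. A hedged sketch of the transcoding step using `google.api_core.path_template` and the `:createSync` binding declared above:

```python
# Sketch of HTTP transcoding for the new custom method; the binding is copied
# from _BaseCreateDataAgentSync above.
from google.api_core import path_template

from google.cloud.geminidataanalytics_v1beta.types import data_agent_service

request = data_agent_service.CreateDataAgentRequest(parent="projects/p/locations/l")
http_options = [
    {
        "method": "post",
        "uri": "/v1beta/{parent=projects/*/locations/*}/dataAgents:createSync",
        "body": "data_agent",
    },
]
transcoded = path_template.transcode(
    http_options, data_agent_service.CreateDataAgentRequest.pb(request)
)
print(transcoded["method"], transcoded["uri"])
# post /v1beta/projects/p/locations/l/dataAgents:createSync
```
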
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1alpha + + +async def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1alpha_generated_DataAgentService_CreateDataAgentSync_async] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_sync.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_sync.py new file mode 100644 index 000000000000..43c7d811e1eb --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1alpha_generated_DataAgentService_CreateDataAgentSync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1alpha + + +def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1alpha_generated_DataAgentService_CreateDataAgentSync_sync] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_async.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_async.py new file mode 100644 index 000000000000..f86700f06ac9 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1alpha_generated_DataAgentService_DeleteDataAgentSync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1alpha + + +async def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_agent_sync(request=request) + + +# [END geminidataanalytics_v1alpha_generated_DataAgentService_DeleteDataAgentSync_async] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_sync.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_sync.py new file mode 100644 index 000000000000..a6ec2fa7a629 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1alpha_generated_DataAgentService_DeleteDataAgentSync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
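
The delete samples handle no response because `DeleteDataAgentSync` returns `google.protobuf.Empty`. A hedged sketch of one way to confirm the deletion afterward, by letting a follow-up get raise `NotFound` (resource name is a placeholder):

```python
# Sketch: delete_data_agent_sync returns no payload; a follow-up get raising
# NotFound is one way to confirm the agent is gone.
from google.api_core import exceptions

from google.cloud import geminidataanalytics_v1beta

client = geminidataanalytics_v1beta.DataAgentServiceClient()
name = "projects/p/locations/l/dataAgents/a"  # placeholder
client.delete_data_agent_sync(
    request=geminidataanalytics_v1beta.DeleteDataAgentRequest(name=name)
)
try:
    client.get_data_agent(
        request=geminidataanalytics_v1beta.GetDataAgentRequest(name=name)
    )
except exceptions.NotFound:
    print("deleted")
```
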
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1alpha + + +def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + client.delete_data_agent_sync(request=request) + + +# [END geminidataanalytics_v1alpha_generated_DataAgentService_DeleteDataAgentSync_sync] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_async.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_async.py new file mode 100644 index 000000000000..18ff428421ae --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1alpha_generated_DataAgentService_UpdateDataAgentSync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1alpha + + +async def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.UpdateDataAgentRequest() + + # Make the request + response = await client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1alpha_generated_DataAgentService_UpdateDataAgentSync_async] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_sync.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_sync.py new file mode 100644 index 000000000000..e2b48cc41bd8 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1alpha_generated_DataAgentService_UpdateDataAgentSync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1alpha + + +def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1alpha.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1alpha.UpdateDataAgentRequest() + + # Make the request + response = client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1alpha_generated_DataAgentService_UpdateDataAgentSync_sync] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_async.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_async.py new file mode 100644 index 000000000000..e22cf497b3c8 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1beta_generated_DataAgentService_CreateDataAgentSync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
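
The generated update samples construct an empty `UpdateDataAgentRequest`; in practice an update names the agent and usually carries a field mask (assuming the standard AIP-134 `update_mask` field). A hedged sketch, where `description` is an assumed field name used only to illustrate the mask:

```python
# Sketch of a more realistic update than the minimal generated sample.
from google.protobuf import field_mask_pb2

from google.cloud import geminidataanalytics_v1beta

client = geminidataanalytics_v1beta.DataAgentServiceClient()
request = geminidataanalytics_v1beta.UpdateDataAgentRequest(
    data_agent=geminidataanalytics_v1beta.DataAgent(
        name="projects/p/locations/l/dataAgents/a",  # placeholder
    ),
    # "description" is an assumed field shown for illustration only.
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
response = client.update_data_agent_sync(request=request)
print(response)
```
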
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1beta + + +async def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1beta_generated_DataAgentService_CreateDataAgentSync_async] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_sync.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_sync.py new file mode 100644 index 000000000000..e31d42a4a2e1 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1beta_generated_DataAgentService_CreateDataAgentSync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1beta + + +def sample_create_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.CreateDataAgentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1beta_generated_DataAgentService_CreateDataAgentSync_sync] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_async.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_async.py new file mode 100644 index 000000000000..c9667d472170 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1beta_generated_DataAgentService_DeleteDataAgentSync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1beta + + +async def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_agent_sync(request=request) + + +# [END geminidataanalytics_v1beta_generated_DataAgentService_DeleteDataAgentSync_async] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_sync.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_sync.py new file mode 100644 index 000000000000..b89bd293a2e2 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1beta_generated_DataAgentService_DeleteDataAgentSync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1beta + + +def sample_delete_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.DeleteDataAgentRequest( + name="name_value", + ) + + # Make the request + client.delete_data_agent_sync(request=request) + + +# [END geminidataanalytics_v1beta_generated_DataAgentService_DeleteDataAgentSync_sync] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_async.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_async.py new file mode 100644 index 000000000000..032554e7d0f8 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1beta_generated_DataAgentService_UpdateDataAgentSync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1beta + + +async def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceAsyncClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.UpdateDataAgentRequest() + + # Make the request + response = await client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1beta_generated_DataAgentService_UpdateDataAgentSync_async] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_sync.py b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_sync.py new file mode 100644 index 000000000000..fb5e19fa19e3 --- /dev/null +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataAgentSync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-geminidataanalytics + + +# [START geminidataanalytics_v1beta_generated_DataAgentService_UpdateDataAgentSync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import geminidataanalytics_v1beta + + +def sample_update_data_agent_sync(): + # Create a client + client = geminidataanalytics_v1beta.DataAgentServiceClient() + + # Initialize request argument(s) + request = geminidataanalytics_v1beta.UpdateDataAgentRequest() + + # Make the request + response = client.update_data_agent_sync(request=request) + + # Handle the response + print(response) + + +# [END geminidataanalytics_v1beta_generated_DataAgentService_UpdateDataAgentSync_sync] diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1alpha.json b/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1alpha.json index 25992bd63f4d..d5606fd316ee 100644 --- a/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1alpha.json +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1alpha.json @@ -11,6 +11,183 @@ "version": "0.8.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceAsyncClient", + "shortName": "DataAgentServiceAsyncClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceAsyncClient.create_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService.CreateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "CreateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1alpha.types.CreateDataAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent" + }, + { + "name": "data_agent_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent", + "shortName": "create_data_agent_sync" + }, + "description": "Sample for CreateDataAgentSync", + "file": "geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1alpha_generated_DataAgentService_CreateDataAgentSync_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient", + "shortName": 
"DataAgentServiceClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.create_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService.CreateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "CreateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1alpha.types.CreateDataAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent" + }, + { + "name": "data_agent_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent", + "shortName": "create_data_agent_sync" + }, + "description": "Sample for CreateDataAgentSync", + "file": "geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1alpha_generated_DataAgentService_CreateDataAgentSync_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync_sync.py" + }, { "canonical": true, "clientMethod": { @@ -188,6 +365,161 @@ ], "title": "geminidataanalytics_v1alpha_generated_data_agent_service_create_data_agent_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceAsyncClient", + "shortName": "DataAgentServiceAsyncClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceAsyncClient.delete_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService.DeleteDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "DeleteDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1alpha.types.DeleteDataAgentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_agent_sync" + }, + "description": "Sample for DeleteDataAgentSync", + "file": "geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1alpha_generated_DataAgentService_DeleteDataAgentSync_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient", + "shortName": "DataAgentServiceClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.delete_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService.DeleteDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "DeleteDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1alpha.types.DeleteDataAgentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_agent_sync" + }, + "description": "Sample for DeleteDataAgentSync", + "file": "geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1alpha_generated_DataAgentService_DeleteDataAgentSync_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1alpha_generated_data_agent_service_delete_data_agent_sync_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1154,6 +1486,175 @@ ], "title": "geminidataanalytics_v1alpha_generated_data_agent_service_set_iam_policy_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceAsyncClient", + "shortName": "DataAgentServiceAsyncClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceAsyncClient.update_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService.UpdateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "UpdateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1alpha.types.UpdateDataAgentRequest" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent", + "shortName": "update_data_agent_sync" + }, + "description": "Sample for UpdateDataAgentSync", + 
"file": "geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1alpha_generated_DataAgentService_UpdateDataAgentSync_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient", + "shortName": "DataAgentServiceClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1alpha.DataAgentServiceClient.update_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService.UpdateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1alpha.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "UpdateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1alpha.types.UpdateDataAgentRequest" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1alpha.types.DataAgent", + "shortName": "update_data_agent_sync" + }, + "description": "Sample for UpdateDataAgentSync", + "file": "geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1alpha_generated_DataAgentService_UpdateDataAgentSync_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1alpha_generated_data_agent_service_update_data_agent_sync_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1beta.json b/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1beta.json index 5ee4e28bb97a..10070c846007 100644 --- a/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1beta.json +++ b/packages/google-cloud-geminidataanalytics/samples/generated_samples/snippet_metadata_google.cloud.geminidataanalytics.v1beta.json @@ -11,6 +11,183 @@ "version": "0.8.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceAsyncClient", + "shortName": "DataAgentServiceAsyncClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceAsyncClient.create_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService.CreateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "CreateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1beta.types.CreateDataAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1beta.types.DataAgent" + }, + { + "name": "data_agent_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1beta.types.DataAgent", + "shortName": "create_data_agent_sync" + }, + "description": "Sample for CreateDataAgentSync", + "file": "geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1beta_generated_DataAgentService_CreateDataAgentSync_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient", + "shortName": "DataAgentServiceClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.create_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService.CreateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "CreateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1beta.types.CreateDataAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1beta.types.DataAgent" + }, + { + "name": "data_agent_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1beta.types.DataAgent", + "shortName": "create_data_agent_sync" + }, + "description": "Sample for CreateDataAgentSync", + "file": "geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1beta_generated_DataAgentService_CreateDataAgentSync_sync", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync_sync.py" + }, { "canonical": true, "clientMethod": { @@ -188,6 +365,161 @@ ], "title": "geminidataanalytics_v1beta_generated_data_agent_service_create_data_agent_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceAsyncClient", + "shortName": "DataAgentServiceAsyncClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceAsyncClient.delete_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService.DeleteDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "DeleteDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1beta.types.DeleteDataAgentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_agent_sync" + }, + "description": "Sample for DeleteDataAgentSync", + "file": "geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1beta_generated_DataAgentService_DeleteDataAgentSync_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient", + "shortName": "DataAgentServiceClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.delete_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService.DeleteDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "DeleteDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1beta.types.DeleteDataAgentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_agent_sync" + }, + "description": "Sample for DeleteDataAgentSync", + "file": 
"geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1beta_generated_DataAgentService_DeleteDataAgentSync_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1beta_generated_data_agent_service_delete_data_agent_sync_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1154,6 +1486,175 @@ ], "title": "geminidataanalytics_v1beta_generated_data_agent_service_set_iam_policy_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceAsyncClient", + "shortName": "DataAgentServiceAsyncClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceAsyncClient.update_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService.UpdateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "UpdateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.geminidataanalytics_v1beta.types.UpdateDataAgentRequest" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1beta.types.DataAgent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1beta.types.DataAgent", + "shortName": "update_data_agent_sync" + }, + "description": "Sample for UpdateDataAgentSync", + "file": "geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1beta_generated_DataAgentService_UpdateDataAgentSync_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient", + "shortName": "DataAgentServiceClient" + }, + "fullName": "google.cloud.geminidataanalytics_v1beta.DataAgentServiceClient.update_data_agent_sync", + "method": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService.UpdateDataAgentSync", + "service": { + "fullName": "google.cloud.geminidataanalytics.v1beta.DataAgentService", + "shortName": "DataAgentService" + }, + "shortName": "UpdateDataAgentSync" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.geminidataanalytics_v1beta.types.UpdateDataAgentRequest" + }, + { + "name": "data_agent", + "type": "google.cloud.geminidataanalytics_v1beta.types.DataAgent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.geminidataanalytics_v1beta.types.DataAgent", + "shortName": "update_data_agent_sync" + }, + "description": "Sample for UpdateDataAgentSync", + "file": "geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "geminidataanalytics_v1beta_generated_DataAgentService_UpdateDataAgentSync_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "geminidataanalytics_v1beta_generated_data_agent_service_update_data_agent_sync_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1alpha/test_data_agent_service.py b/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1alpha/test_data_agent_service.py index 15f9f05f9307..59f5cd43d4d3 100644 --- a/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1alpha/test_data_agent_service.py +++ b/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1alpha/test_data_agent_service.py @@ -3189,11 +3189,11 @@ async def test_create_data_agent_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - data_agent_service.UpdateDataAgentRequest, + data_agent_service.CreateDataAgentRequest, dict, ], ) -def test_update_data_agent(request_type, transport: str = "grpc"): +def test_create_data_agent_sync(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3205,23 +3205,30 @@ def test_update_data_agent(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_data_agent(request) + call.return_value = gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + response = client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" -def test_update_data_agent_non_empty_request_with_auto_populated_field(): +def test_create_data_agent_sync_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -3232,22 +3239,28 @@ def test_update_data_agent_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest( + parent="parent_value", + data_agent_id="data_agent_id_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_data_agent(request=request) + client.create_data_agent_sync(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == data_agent_service.UpdateDataAgentRequest() + assert args[0] == data_agent_service.CreateDataAgentRequest( + parent="parent_value", + data_agent_id="data_agent_id_value", + ) -def test_update_data_agent_use_cached_wrapped_rpc(): +def test_create_data_agent_sync_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3261,7 +3274,10 @@ def test_update_data_agent_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_data_agent in client._transport._wrapped_methods + assert ( + client._transport.create_data_agent_sync + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -3269,20 +3285,15 @@ def test_update_data_agent_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_agent + client._transport.create_data_agent_sync ] = mock_rpc request = {} - client.update_data_agent(request) + client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_agent(request) + client.create_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3290,7 +3301,7 @@ def test_update_data_agent_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_data_agent_async_use_cached_wrapped_rpc( +async def test_create_data_agent_sync_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3307,7 +3318,7 @@ async def test_update_data_agent_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_data_agent + client._client._transport.create_data_agent_sync in client._client._transport._wrapped_methods ) @@ -3315,21 +3326,16 @@ async def test_update_data_agent_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_data_agent + client._client._transport.create_data_agent_sync ] = mock_rpc request = {} - await client.update_data_agent(request) + await client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_agent(request) + await client.create_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3337,9 +3343,9 @@ async def test_update_data_agent_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_data_agent_async( +async def test_create_data_agent_sync_async( transport: str = "grpc_asyncio", - request_type=data_agent_service.UpdateDataAgentRequest, + request_type=data_agent_service.CreateDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -3352,46 +3358,53 @@ async def test_update_data_agent_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) ) - response = await client.update_data_agent(request) + response = await client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.asyncio -async def test_update_data_agent_async_from_dict(): - await test_update_data_agent_async(request_type=dict) +async def test_create_data_agent_sync_async_from_dict(): + await test_create_data_agent_sync_async(request_type=dict) -def test_update_data_agent_field_headers(): +def test_create_data_agent_sync_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() - request.data_agent.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_data_agent(request) + call.return_value = gcg_data_agent.DataAgent() + client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3402,30 +3415,30 @@ def test_update_data_agent_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_agent.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_data_agent_field_headers_async(): +async def test_create_data_agent_sync_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() - request.data_agent.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + gcg_data_agent.DataAgent() ) - await client.update_data_agent(request) + await client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3436,24 +3449,25 @@ async def test_update_data_agent_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_agent.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_data_agent_flattened(): +def test_create_data_agent_sync_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = gcg_data_agent.DataAgent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_data_agent( + client.create_data_agent_sync( + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3461,13 +3475,16 @@ def test_update_data_agent_flattened(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].data_agent mock_val = gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( @@ -3477,12 +3494,12 @@ def test_update_data_agent_flattened(): ) ) assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].data_agent_id + mock_val = "data_agent_id_value" assert arg == mock_val -def test_update_data_agent_flattened_error(): +def test_create_data_agent_sync_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3490,8 +3507,9 @@ def test_update_data_agent_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_agent( - data_agent_service.UpdateDataAgentRequest(), + client.create_data_agent_sync( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3499,29 +3517,30 @@ def test_update_data_agent_flattened_error(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) @pytest.mark.asyncio -async def test_update_data_agent_flattened_async(): +async def test_create_data_agent_sync_flattened_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = gcg_data_agent.DataAgent() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + gcg_data_agent.DataAgent() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_data_agent( + response = await client.create_data_agent_sync( + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3529,13 +3548,16 @@ async def test_update_data_agent_flattened_async(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].data_agent mock_val = gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( @@ -3545,13 +3567,13 @@ async def test_update_data_agent_flattened_async(): ) ) assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].data_agent_id + mock_val = "data_agent_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_data_agent_flattened_error_async(): +async def test_create_data_agent_sync_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3559,8 +3581,9 @@ async def test_update_data_agent_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_data_agent( - data_agent_service.UpdateDataAgentRequest(), + await client.create_data_agent_sync( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3568,18 +3591,18 @@ async def test_update_data_agent_flattened_error_async(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) @pytest.mark.parametrize( "request_type", [ - data_agent_service.DeleteDataAgentRequest, + data_agent_service.UpdateDataAgentRequest, dict, ], ) -def test_delete_data_agent(request_type, transport: str = "grpc"): +def test_update_data_agent(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3591,23 +3614,23 @@ def test_delete_data_agent(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_data_agent(request) + response = client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_data_agent_non_empty_request_with_auto_populated_field(): +def test_update_data_agent_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -3618,26 +3641,22 @@ def test_delete_data_agent_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = data_agent_service.DeleteDataAgentRequest( - name="name_value", - ) + request = data_agent_service.UpdateDataAgentRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_data_agent(request=request) + client.update_data_agent(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == data_agent_service.DeleteDataAgentRequest( - name="name_value", - ) + assert args[0] == data_agent_service.UpdateDataAgentRequest() -def test_delete_data_agent_use_cached_wrapped_rpc(): +def test_update_data_agent_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3651,7 +3670,7 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_data_agent in client._transport._wrapped_methods + assert client._transport.update_data_agent in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -3659,10 +3678,10 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_data_agent + client._transport.update_data_agent ] = mock_rpc request = {} - client.delete_data_agent(request) + client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3672,7 +3691,7 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_data_agent(request) + client.update_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3680,7 +3699,7 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_data_agent_async_use_cached_wrapped_rpc( +async def test_update_data_agent_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3697,7 +3716,7 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_data_agent + client._client._transport.update_data_agent in client._client._transport._wrapped_methods ) @@ -3705,11 +3724,11 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_data_agent + client._client._transport.update_data_agent ] = mock_rpc request = {} - await client.delete_data_agent(request) + await client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3719,7 +3738,7 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_data_agent(request) + await client.update_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3727,9 +3746,9 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_data_agent_async( +async def test_update_data_agent_async( transport: str = "grpc_asyncio", - request_type=data_agent_service.DeleteDataAgentRequest, + request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -3742,18 +3761,18 @@ async def test_delete_data_agent_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_data_agent(request) + response = await client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -3761,27 +3780,27 @@ async def test_delete_data_agent_async( @pytest.mark.asyncio -async def test_delete_data_agent_async_from_dict(): - await test_delete_data_agent_async(request_type=dict) +async def test_update_data_agent_async_from_dict(): + await test_update_data_agent_async(request_type=dict) -def test_delete_data_agent_field_headers(): +def test_update_data_agent_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.name = "name_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_data_agent(request) + client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3792,30 +3811,30 @@ def test_delete_data_agent_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "data_agent.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_data_agent_field_headers_async(): +async def test_update_data_agent_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
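In the async variants the mocked stub cannot return a bare protobuf message, because the client awaits the call; that is why the tests wrap return values in grpc_helpers_async.FakeUnaryUnaryCall. A self-contained stand-in showing the awaitable behaviour (illustrative class, not the real helper):

    import asyncio

    class FakeCall:
        """Awaitable wrapper that resolves to a canned response."""

        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()

    async def main():
        # Awaiting the fake call yields the canned response, as in the mocks above.
        assert await FakeCall("operations/spam") == "operations/spam"

    asyncio.run(main())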
- request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.name = "name_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_data_agent(request) + await client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3826,37 +3845,53 @@ async def test_delete_data_agent_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "data_agent.name=name_value", ) in kw["metadata"] -def test_delete_data_agent_flattened(): +def test_update_data_agent_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_data_agent( - name="name_value", + client.update_data_agent( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_data_agent_flattened_error(): +def test_update_data_agent_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3864,21 +3899,28 @@ def test_delete_data_agent_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
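The *_field_headers tests assert that URI-bound request fields travel in the x-goog-request-params metadata entry, and that UpdateDataAgent routes on the nested data_agent.name field because its request has no top-level name. The generated clients build this header with the routing-header helper in google.api_core.gapic_v1; the function below is an illustrative reimplementation of the same idea:

    from urllib.parse import quote

    def request_params_header(pairs):
        value = "&".join(f"{field}={quote(str(v), safe='')}" for field, v in pairs)
        return ("x-goog-request-params", value)

    assert request_params_header([("data_agent.name", "name_value")]) == (
        "x-goog-request-params",
        "data_agent.name=name_value",
    )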
with pytest.raises(ValueError): - client.delete_data_agent( - data_agent_service.DeleteDataAgentRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_data_agent_flattened_async(): - client = DataAgentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) + client.update_data_agent( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_agent_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3888,21 +3930,37 @@ async def test_delete_data_agent_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_data_agent( - name="name_value", + response = await client.update_data_agent( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_data_agent_flattened_error_async(): +async def test_update_data_agent_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3910,20 +3968,27 @@ async def test_delete_data_agent_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
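Update RPCs on this surface take the whole resource plus a FieldMask naming the fields the server should actually change, which is what the flattened update_mask argument above carries. A short sketch using the real protobuf type; the request construction in the final comment is illustrative:

    from google.protobuf import field_mask_pb2

    # Only the listed paths are applied server-side; unlisted fields stay as-is.
    mask = field_mask_pb2.FieldMask(paths=["display_name", "description"])
    assert list(mask.paths) == ["display_name", "description"]
    # e.g. UpdateDataAgentRequest(data_agent=agent, update_mask=mask)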
with pytest.raises(ValueError): - await client.delete_data_agent( - data_agent_service.DeleteDataAgentRequest(), - name="name_value", + await client.update_data_agent( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.GetIamPolicyRequest, + data_agent_service.UpdateDataAgentRequest, dict, ], ) -def test_get_iam_policy(request_type, transport: str = "grpc"): +def test_update_data_agent_sync(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3934,27 +3999,31 @@ def test_get_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + call.return_value = gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", ) - response = client.get_iam_policy(request) + response = client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): +def test_update_data_agent_sync_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -3965,24 +4034,22 @@ def test_get_iam_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) + request = data_agent_service.UpdateDataAgentRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_iam_policy(request=request) + client.update_data_agent_sync(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) + assert args[0] == data_agent_service.UpdateDataAgentRequest() -def test_get_iam_policy_use_cached_wrapped_rpc(): +def test_update_data_agent_sync_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3996,21 +4063,26 @@ def test_get_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods + assert ( + client._transport.update_data_agent_sync + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_data_agent_sync + ] = mock_rpc request = {} - client.get_iam_policy(request) + client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_iam_policy(request) + client.update_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4018,7 +4090,7 @@ def test_get_iam_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc( +async def test_update_data_agent_sync_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4035,7 +4107,7 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_iam_policy + client._client._transport.update_data_agent_sync in client._client._transport._wrapped_methods ) @@ -4043,16 +4115,16 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_iam_policy + client._client._transport.update_data_agent_sync ] = mock_rpc request = {} - await client.get_iam_policy(request) + await client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_iam_policy(request) + await client.update_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4060,8 +4132,9 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +async def test_update_data_agent_sync_async( + transport: str = "grpc_asyncio", + request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -4073,48 +4146,54 @@ async def test_get_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", + gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", ) ) - response = await client.get_iam_policy(request) + response = await client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) +async def test_update_data_agent_sync_async_from_dict(): + await test_update_data_agent_sync_async(request_type=dict) -def test_get_iam_policy_field_headers(): +def test_update_data_agent_sync_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.resource = "resource_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + call.return_value = gcg_data_agent.DataAgent() + client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4125,26 +4204,30 @@ def test_get_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "data_agent.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): +async def test_update_data_agent_sync_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.resource = "resource_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
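The update_data_agent_sync tests differ from the plain update_data_agent tests in the response shape: the plain method returns a long-running operation to poll, while the _sync variant resolves server-side and returns the DataAgent itself. A mock-based sketch of the two calling conventions (illustrative; mocks stand in for real clients):

    from unittest import mock

    lro_client = mock.Mock()
    lro_client.update_data_agent.return_value.result.return_value = "DataAgent"
    # LRO style: receive a future-like operation, then block on result().
    operation = lro_client.update_data_agent({})
    assert operation.result() == "DataAgent"

    sync_client = mock.Mock()
    sync_client.update_data_agent_sync.return_value = "DataAgent"
    # _sync style: the finished resource comes back directly.
    assert sync_client.update_data_agent_sync({}) == "DataAgent"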
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcg_data_agent.DataAgent() + ) + await client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4155,52 +4238,53 @@ async def test_get_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "data_agent.name=name_value", ) in kw["metadata"] -def test_get_iam_policy_from_dict_foreign(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): +def test_update_data_agent_sync_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = gcg_data_agent.DataAgent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_iam_policy( - resource="resource_value", + client.update_data_agent_sync( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_iam_policy_flattened_error(): +def test_update_data_agent_sync_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4208,41 +4292,68 @@ def test_get_iam_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", + client.update_data_agent_sync( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): +async def test_update_data_agent_sync_flattened_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = gcg_data_agent.DataAgent() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcg_data_agent.DataAgent() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource="resource_value", + response = await client.update_data_agent_sync( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): +async def test_update_data_agent_sync_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4250,20 +4361,27 @@ async def test_get_iam_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", - ) - - -@pytest.mark.parametrize( - "request_type", + await client.update_data_agent_sync( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + data_agent_service.DeleteDataAgentRequest, dict, ], ) -def test_set_iam_policy(request_type, transport: str = "grpc"): +def test_delete_data_agent(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4274,27 +4392,24 @@ def test_set_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - response = client.set_iam_policy(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): +def test_delete_data_agent_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -4305,24 +4420,26 @@ def test_set_iam_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", + request = data_agent_service.DeleteDataAgentRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.set_iam_policy(request=request) + client.delete_data_agent(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", + assert args[0] == data_agent_service.DeleteDataAgentRequest( + name="name_value", ) -def test_set_iam_policy_use_cached_wrapped_rpc(): +def test_delete_data_agent_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4336,21 +4453,28 @@ def test_set_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods + assert client._transport.delete_data_agent in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_data_agent + ] = mock_rpc request = {} - client.set_iam_policy(request) + client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.set_iam_policy(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4358,7 +4482,7 @@ def test_set_iam_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc( +async def test_delete_data_agent_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4375,7 +4499,7 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.set_iam_policy + client._client._transport.delete_data_agent in client._client._transport._wrapped_methods ) @@ -4383,16 +4507,21 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.set_iam_policy + client._client._transport.delete_data_agent ] = mock_rpc request = {} - await client.set_iam_policy(request) + await client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.set_iam_policy(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
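The *_non_empty_request_with_auto_populated_field tests above are coverage failsafes for AIP-4235: string fields the caller sets explicitly must survive untouched, while eligible request-ID fields left unset are filled with a UUID4. A minimal sketch of that behaviour; the request_id field name is illustrative:

    import uuid

    def auto_populate(request: dict) -> dict:
        # Fill request_id only when the caller left it empty.
        if not request.get("request_id"):
            request["request_id"] = str(uuid.uuid4())
        return request

    req = auto_populate({"name": "name_value"})
    assert req["name"] == "name_value"   # caller-set fields are preserved
    assert len(req["request_id"]) == 36  # canonical UUID4 string length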
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4400,8 +4529,9 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +async def test_delete_data_agent_async( + transport: str = "grpc_asyncio", + request_type=data_agent_service.DeleteDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -4413,48 +4543,47 @@ async def test_set_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.set_iam_policy(request) + response = await client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) +async def test_delete_data_agent_async_from_dict(): + await test_delete_data_agent_async(request_type=dict) -def test_set_iam_policy_field_headers(): +def test_delete_data_agent_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4465,26 +4594,30 @@ def test_set_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): +async def test_delete_data_agent_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4495,53 +4628,37 @@ async def test_set_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] -def test_set_iam_policy_from_dict_foreign(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): +def test_delete_data_agent_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.set_iam_policy( - resource="resource_value", + client.delete_data_agent( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_set_iam_policy_flattened_error(): +def test_delete_data_agent_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4549,41 +4666,45 @@ def test_set_iam_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + client.delete_data_agent( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): +async def test_delete_data_agent_flattened_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource="resource_value", + response = await client.delete_data_agent( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): +async def test_delete_data_agent_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4591,283 +4712,84 @@ async def test_set_iam_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + await client.delete_data_agent( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) -def test_list_data_agents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.DeleteDataAgentRequest, + dict, + ], +) +def test_delete_data_agent_sync(request_type, transport: str = "grpc"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Ensure method has been cached - assert client._transport.list_data_agents in client._transport._wrapped_methods + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_data_agents - ] = mock_rpc - - request = {} - client.list_data_agents(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. 
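A recurring device throughout these tests is mock.patch.object(type(client.transport.<rpc>), "__call__"): the transport exposes each RPC as a gRPC multicallable object, so faking the RPC means replacing the call operator on its type. A self-contained sketch of the same trick with an illustrative class:

    from unittest import mock

    class MultiCallable:
        def __call__(self, request):
            raise RuntimeError("would hit the network")

    stub = MultiCallable()
    with mock.patch.object(type(stub), "__call__") as call:
        call.return_value = "operations/op"
        # The patched call operator intercepts the invocation and records it.
        assert stub("request") == "operations/op"
        call.assert_called_once()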
- assert mock_rpc.call_count == 1 - - client.list_data_agents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_data_agents_rest_required_fields( - request_type=data_agent_service.ListDataAgentsRequest, -): - transport_class = transports.DataAgentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_data_agents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_data_agents._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "show_deleted", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListDataAgentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
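The removed REST required-fields tests here stub path_template.transcode so that, with no URI fields and an empty body, every request field is forced into the query string. The stubbed result mirrors the shape the generated REST transport consumes; a sketch with illustrative values:

    from urllib.parse import urlencode

    # Shape consumed by the generated REST layer: a URI, an HTTP verb, and the
    # remaining fields as query parameters (or a body, for non-GET verbs).
    transcode_result = {
        "uri": "v1/sample_method",
        "method": "get",
        "query_params": {"parent": "parent_value", "pageSize": 5},
    }
    assert urlencode(transcode_result["query_params"]) == "parent=parent_value&pageSize=5"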
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_data_agents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_data_agents_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_data_agents._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "showDeleted", - ) - ) - & set(("parent",)) - ) - - -def test_list_data_agents_rest_flattened(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListDataAgentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_data_agents(**mock_args) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_agent_service.DeleteDataAgentRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/dataAgents" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. + assert response is None -def test_list_data_agents_rest_flattened_error(transport: str = "rest"): +def test_delete_data_agent_sync_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_data_agents( - data_agent_service.ListDataAgentsRequest(), - parent="parent_value", - ) - - -def test_list_data_agents_rest_pager(transport: str = "rest"): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_agent_service.DeleteDataAgentRequest( + name="name_value", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - data_agent_service.ListDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - next_page_token="abc", - ), - data_agent_service.ListDataAgentsResponse( - data_agents=[], - next_page_token="def", - ), - data_agent_service.ListDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - ], - next_page_token="ghi", - ), - data_agent_service.ListDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - ), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - data_agent_service.ListDataAgentsResponse.to_json(x) for x in response + client.delete_data_agent_sync(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_agent_service.DeleteDataAgentRequest( + name="name_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_data_agents(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_agent.DataAgent) for i in results) - - pages = list(client.list_data_agents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): +def test_delete_data_agent_sync_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -4876,7 +4798,7 @@ def test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_accessible_data_agents + client._transport.delete_data_agent_sync in client._transport._wrapped_methods ) @@ -4886,262 +4808,320 @@ def 
test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_accessible_data_agents + client._transport.delete_data_agent_sync ] = mock_rpc - request = {} - client.list_accessible_data_agents(request) + client.delete_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_accessible_data_agents(request) + client.delete_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_accessible_data_agents_rest_required_fields( - request_type=data_agent_service.ListAccessibleDataAgentsRequest, +@pytest.mark.asyncio +async def test_delete_data_agent_sync_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DataAgentServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.delete_data_agent_sync + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_agent_sync + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.delete_data_agent_sync(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "creator_filter", - "filter", - "order_by", - "page_size", - "page_token", - "show_deleted", - ) + await client.delete_data_agent_sync(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_agent_sync_async( + transport: str = "grpc_asyncio", + request_type=data_agent_service.DeleteDataAgentRequest, +): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_agent_sync(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_agent_service.DeleteDataAgentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +@pytest.mark.asyncio +async def test_delete_data_agent_sync_async_from_dict(): + await test_delete_data_agent_sync_async(request_type=dict) + + +def test_delete_data_agent_sync_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListAccessibleDataAgentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_agent_service.DeleteDataAgentRequest() - # Convert return value to protobuf type - return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value = None + client.delete_data_agent_sync(request) - response = client.list_accessible_data_agents(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_accessible_data_agents_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_data_agent_sync_field_headers_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.list_accessible_data_agents._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "creatorFilter", - "filter", - "orderBy", - "pageSize", - "pageToken", - "showDeleted", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_agent_service.DeleteDataAgentRequest() + request.name = "name_value" -def test_list_accessible_data_agents_rest_flattened(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_agent_sync(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListAccessibleDataAgentsResponse() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +def test_delete_data_agent_sync_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.list_accessible_data_agents(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_agent_sync( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/dataAgents:listAccessible" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_accessible_data_agents_rest_flattened_error(transport: str = "rest"): +def test_delete_data_agent_sync_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_accessible_data_agents( - data_agent_service.ListAccessibleDataAgentsRequest(), - parent="parent_value", + client.delete_data_agent_sync( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) -def test_list_accessible_data_agents_rest_pager(transport: str = "rest"): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_delete_data_agent_sync_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - next_page_token="abc", - ), - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[], - next_page_token="def", - ), - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - ], - next_page_token="ghi", - ), - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - ), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_agent_sync( + name="name_value", ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple( - data_agent_service.ListAccessibleDataAgentsResponse.to_json(x) - for x in response + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_data_agent_sync_flattened_error_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_agent_sync( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_accessible_data_agents(request=sample_request) +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_agent.DataAgent) for i in results) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - pages = list(client.list_accessible_data_agents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" -def test_get_data_agent_rest_use_cached_wrapped_rpc(): +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + +def test_get_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -5149,179 +5129,339 @@ def test_get_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_data_agent in client._transport._wrapped_methods + assert client._transport.get_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_data_agent] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc request = {} - client.get_data_agent(request) + client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_agent(request) + client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_data_agent_rest_required_fields( - request_type=data_agent_service.GetDataAgentRequest, +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DataAgentServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_iam_policy + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_data_agent._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_iam_policy + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.get_iam_policy(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_data_agent._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.get_iam_policy(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = data_agent.DataAgent() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value = Response() - response_value.status_code = 200 + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Convert return value to protobuf type - return_value = data_agent.DataAgent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request - response = client.get_data_agent(request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) -def test_get_data_agent_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_get_iam_policy_field_headers(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_data_agent._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) -def test_get_data_agent_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = data_agent.DataAgent() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataAgents/sample3" - } +def test_get_iam_policy_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = data_agent.DataAgent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val - client.get_data_agent(**mock_args) + +def test_get_iam_policy_flattened_error(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource="resource_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataAgents/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", ) -def test_get_data_agent_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_agent( - data_agent_service.GetDataAgentRequest(), - name="name_value", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request -def test_create_data_agent_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.set_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + +def test_set_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -5329,209 +5469,268 @@ def test_create_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_data_agent in client._transport._wrapped_methods + assert client._transport.set_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_data_agent - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc request = {} - client.create_data_agent(request) + client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() - client.create_data_agent(request) + # Ensure method has been cached + assert ( + client._client._transport.set_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_iam_policy + ] = mock_rpc + + request = {} + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_data_agent_rest_required_fields( - request_type=data_agent_service.CreateDataAgentRequest, +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest ): - transport_class = transports.DataAgentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_data_agent._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request - jsonified_request["parent"] = "parent_value" + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_data_agent._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "data_agent_id", - "request_id", - ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + +def test_set_iam_policy_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.resource = "resource_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) - response = client.create_data_agent(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] -def test_create_data_agent_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_data_agent._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "dataAgentId", - "requestId", - ) - ) - & set( - ( - "parent", - "dataAgent", - ) - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource_value" -def test_create_data_agent_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} +def test_set_iam_policy_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) - ), - data_agent_id="data_agent_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource="resource_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_data_agent(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/dataAgents" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val -def test_create_data_agent_rest_flattened_error(transport: str = "rest"): +def test_set_iam_policy_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_data_agent( - data_agent_service.CreateDataAgentRequest(), - parent="parent_value", - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) - ), - data_agent_id="data_agent_id_value", + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", ) -def test_update_data_agent_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_list_data_agents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5545,7 +5744,7 @@ def test_update_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_data_agent in client._transport._wrapped_methods + assert client._transport.list_data_agents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5553,32 +5752,29 @@ def test_update_data_agent_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_agent + client._transport.list_data_agents ] = mock_rpc request = {} - client.update_data_agent(request) + client.list_data_agents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_agent(request) + client.list_data_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_data_agent_rest_required_fields( - request_type=data_agent_service.UpdateDataAgentRequest, +def test_list_data_agents_rest_required_fields( + request_type=data_agent_service.ListDataAgentsRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5589,24 +5785,31 @@ def test_update_data_agent_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_agent._get_unset_required_fields(jsonified_request) + ).list_data_agents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_agent._get_unset_required_fields(jsonified_request) + ).list_data_agents._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", + "show_deleted", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5615,7 +5818,7 @@ def test_update_data_agent_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListDataAgentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5627,45 +5830,50 @@ def test_update_data_agent_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_agent(request) + response = client.list_data_agents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_data_agent_rest_unset_required_fields(): +def test_list_data_agents_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_data_agent._get_unset_required_fields({}) + unset_fields = transport.list_data_agents._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", + "showDeleted", ) ) - & set(("dataAgent",)) + & set(("parent",)) ) -def test_update_data_agent_rest_flattened(): +def test_list_data_agents_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5674,50 +5882,41 @@ def test_update_data_agent_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListDataAgentsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "data_agent": { - "name": "projects/sample1/locations/sample2/dataAgents/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_agent(**mock_args) + client.list_data_agents(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{data_agent.name=projects/*/locations/*/dataAgents/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/dataAgents" % client.transport._host, args[1], ) -def test_update_data_agent_rest_flattened_error(transport: str = "rest"): +def test_list_data_agents_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5726,20 +5925,76 @@ def test_update_data_agent_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_agent( - data_agent_service.UpdateDataAgentRequest(), - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) + client.list_data_agents( + data_agent_service.ListDataAgentsRequest(), + parent="parent_value", + ) + + +def test_list_data_agents_rest_pager(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_agent_service.ListDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + next_page_token="abc", ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_service.ListDataAgentsResponse( + data_agents=[], + next_page_token="def", + ), + data_agent_service.ListDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + ], + next_page_token="ghi", + ), + data_agent_service.ListDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + data_agent_service.ListDataAgentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_delete_data_agent_rest_use_cached_wrapped_rpc(): + pager = client.list_data_agents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_agent.DataAgent) for i in results) + + pages = list(client.list_data_agents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5753,7 +6008,10 @@ def test_delete_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert 
client._transport.delete_data_agent in client._transport._wrapped_methods + assert ( + client._transport.list_accessible_data_agents + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5761,33 +6019,29 @@ def test_delete_data_agent_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_data_agent + client._transport.list_accessible_data_agents ] = mock_rpc request = {} - client.delete_data_agent(request) + client.list_accessible_data_agents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_agent(request) + client.list_accessible_data_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_data_agent_rest_required_fields( - request_type=data_agent_service.DeleteDataAgentRequest, +def test_list_accessible_data_agents_rest_required_fields( + request_type=data_agent_service.ListAccessibleDataAgentsRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5798,23 +6052,32 @@ def test_delete_data_agent_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_data_agent._get_unset_required_fields(jsonified_request) + ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_data_agent._get_unset_required_fields(jsonified_request) + ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "creator_filter", + "filter", + "order_by", + "page_size", + "page_token", + "show_deleted", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5823,7 +6086,7 @@ def test_delete_data_agent_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListAccessibleDataAgentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5835,36 +6098,53 @@ def test_delete_data_agent_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_data_agent(request) + response = client.list_accessible_data_agents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_data_agent_rest_unset_required_fields(): +def test_list_accessible_data_agents_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_data_agent._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_accessible_data_agents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "creatorFilter", + "filter", + "orderBy", + "pageSize", + "pageToken", + "showDeleted", + ) + ) + & set(("parent",)) + ) -def test_delete_data_agent_rest_flattened(): +def test_list_accessible_data_agents_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5873,41 +6153,43 @@ def test_delete_data_agent_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListAccessibleDataAgentsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataAgents/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_data_agent(**mock_args) + client.list_accessible_data_agents(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataAgents/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/dataAgents:listAccessible" % client.transport._host, args[1], ) -def test_delete_data_agent_rest_flattened_error(transport: str = "rest"): +def test_list_accessible_data_agents_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5916,13 +6198,77 @@ def test_delete_data_agent_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_data_agent( - data_agent_service.DeleteDataAgentRequest(), - name="name_value", + client.list_accessible_data_agents( + data_agent_service.ListAccessibleDataAgentsRequest(), + parent="parent_value", ) -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): +def test_list_accessible_data_agents_rest_pager(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + next_page_token="abc", + ), + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[], + next_page_token="def", + ), + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + ], + next_page_token="ghi", + ), + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + data_agent_service.ListAccessibleDataAgentsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_accessible_data_agents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_agent.DataAgent) for i in results) + + pages = list(client.list_accessible_data_agents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_data_agent_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5936,37 +6282,37 @@ def test_get_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods + assert client._transport.get_data_agent in 
client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_agent] = mock_rpc request = {} - client.get_iam_policy(request) + client.get_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_iam_policy(request) + client.get_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.GetIamPolicyRequest, +def test_get_data_agent_rest_required_fields( + request_type=data_agent_service.GetDataAgentRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} - request_init["resource"] = "" + request_init["name"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -5975,21 +6321,21 @@ def test_get_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) + ).get_data_agent._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) + ).get_data_agent._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5998,7 +6344,7 @@ def test_get_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = data_agent.DataAgent() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6007,41 +6353,42 @@ def test_get_iam_policy_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
- pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_iam_policy(request) + response = client.get_data_agent(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_iam_policy_rest_unset_required_fields(): +def test_get_data_agent_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource",))) + unset_fields = transport.get_data_agent._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_iam_policy_rest_flattened(): +def test_get_data_agent_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6050,41 +6397,43 @@ def test_get_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = data_agent.DataAgent() # get arguments that satisfy an http rule for this method sample_request = { - "resource": "projects/sample1/locations/sample2/dataAgents/sample3" + "name": "projects/sample1/locations/sample2/dataAgents/sample3" } # get truthy value for each flattened field mock_args = dict( - resource="resource_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(**mock_args) + client.get_data_agent(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{resource=projects/*/locations/*/dataAgents/*}:getIamPolicy" + "%s/v1alpha/{name=projects/*/locations/*/dataAgents/*}" % client.transport._host, args[1], ) -def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): +def test_get_data_agent_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6093,13 +6442,13 @@ def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", + client.get_data_agent( + data_agent_service.GetDataAgentRequest(), + name="name_value", ) -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): +def test_create_data_agent_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6113,37 +6462,43 @@ def test_set_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods + assert client._transport.create_data_agent in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_data_agent + ] = mock_rpc request = {} - client.set_iam_policy(request) + client.create_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.set_iam_policy(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_set_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.SetIamPolicyRequest, +def test_create_data_agent_rest_required_fields( + request_type=data_agent_service.CreateDataAgentRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} - request_init["resource"] = "" + request_init["parent"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -6152,21 +6507,28 @@ def test_set_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).create_data_agent._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).create_data_agent._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "data_agent_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6175,7 +6537,7 @@ def test_set_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6184,7 +6546,7 @@ def test_set_iam_policy_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", "method": "post", @@ -6195,38 +6557,42 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) + response = client.create_data_agent(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_iam_policy_rest_unset_required_fields(): +def test_create_data_agent_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + unset_fields = transport.create_data_agent._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "dataAgentId", + "requestId", + ) + ) & set( ( - "resource", - "policy", + "parent", + "dataAgent", ) ) ) -def test_set_iam_policy_rest_flattened(): +def test_create_data_agent_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6235,16 +6601,22 @@ def test_set_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "resource": "projects/sample1/locations/sample2/dataAgents/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - resource="resource_value", + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", ) mock_args.update(sample_request) @@ -6256,20 +6628,20 @@ def test_set_iam_policy_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(**mock_args) + client.create_data_agent(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{resource=projects/*/locations/*/dataAgents/*}:setIamPolicy" + "%s/v1alpha/{parent=projects/*/locations/*}/dataAgents" % client.transport._host, args[1], ) -def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): +def test_create_data_agent_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6278,168 +6650,1543 @@ def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + client.create_data_agent( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_create_data_agent_sync_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + transport="rest", ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options=options, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Ensure method has been cached + assert ( + client._transport.create_data_agent_sync + in client._transport._wrapped_methods ) - # It is an error to provide scopes and a transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.create_data_agent_sync + ] = mock_rpc + request = {} + client.create_data_agent_sync(request) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataAgentServiceClient(transport=transport) - assert client.transport is transport + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.create_data_agent_sync(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - transport = transports.DataAgentServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel +def test_create_data_agent_sync_rest_required_fields( + request_type=data_agent_service.CreateDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataAgentServiceGrpcTransport, - transports.DataAgentServiceGrpcAsyncIOTransport, - transports.DataAgentServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -def test_transport_kind_grpc(): - transport = DataAgentServiceClient.get_transport_class("grpc")( + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).create_data_agent_sync._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -def test_initialize_client_w_grpc(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_agent_sync._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "data_agent_id", + "request_id", + ) ) - assert client is not None + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_agents_empty_call_grpc(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_data_agents), "__call__") as call: - call.return_value = data_agent_service.ListDataAgentsResponse() - client.list_data_agents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.ListDataAgentsRequest() - - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = gcg_data_agent.DataAgent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_accessible_data_agents_empty_call_grpc(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_accessible_data_agents), "__call__" - ) as call: - call.return_value = data_agent_service.ListAccessibleDataAgentsResponse() - client.list_accessible_data_agents(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + response = client.create_data_agent_sync(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_agent_empty_call_grpc(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_create_data_agent_sync_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_data_agent_sync._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "dataAgentId", + "requestId", + ) + ) + & set( + ( + "parent", + "dataAgent", + ) + ) + ) + + +def test_create_data_agent_sync_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcg_data_agent.DataAgent() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_agent_sync(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/dataAgents:createSync" + % client.transport._host, + args[1], + ) + + +def test_create_data_agent_sync_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_agent_sync( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", + ) + + +def test_update_data_agent_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_agent in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_agent + ] = mock_rpc + + request = {} + client.update_data_agent(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_agent(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_agent_rest_required_fields( + request_type=data_agent_service.UpdateDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_agent(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_agent_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_agent._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("dataAgent",)) + ) + + +def test_update_data_agent_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_agent": { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_agent(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{data_agent.name=projects/*/locations/*/dataAgents/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_agent_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_agent( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_agent_sync_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_agent_sync + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_agent_sync + ] = mock_rpc + + request = {} + client.update_data_agent_sync(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_data_agent_sync(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_agent_sync_rest_required_fields( + request_type=data_agent_service.UpdateDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent_sync._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent_sync._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = gcg_data_agent.DataAgent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_agent_sync(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_agent_sync_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_agent_sync._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("dataAgent",)) + ) + + +def test_update_data_agent_sync_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcg_data_agent.DataAgent() + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_agent": { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_agent_sync(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{data_agent.name=projects/*/locations/*/dataAgents/*}:updateSync" + % client.transport._host, + args[1], + ) + + +def test_update_data_agent_sync_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_agent_sync( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_data_agent_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_agent in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_agent + ] = mock_rpc + + request = {} + client.delete_data_agent(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_agent(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_agent_rest_required_fields( + request_type=data_agent_service.DeleteDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_agent(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_agent_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_agent._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_data_agent_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_agent(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataAgents/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_data_agent_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_agent( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", + ) + + +def test_delete_data_agent_sync_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_agent_sync + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_agent_sync + ] = mock_rpc + + request = {} + client.delete_data_agent_sync(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_data_agent_sync(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_agent_sync_rest_required_fields( + request_type=data_agent_service.DeleteDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent_sync._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent_sync._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_agent_sync(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_agent_sync_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_agent_sync._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_data_agent_sync_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_agent_sync(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataAgents/*}:deleteSync" + % client.transport._host, + args[1], + ) + + +def test_delete_data_agent_sync_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_agent_sync( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_iam_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_iam_policy_rest_unset_required_fields():
+    transport = transports.DataAgentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("resource",)))
+
+
+def test_get_iam_policy_rest_flattened():
+    client = DataAgentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "resource": "projects/sample1/locations/sample2/dataAgents/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            resource="resource_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.get_iam_policy(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1alpha/{resource=projects/*/locations/*/dataAgents/*}:getIamPolicy"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_get_iam_policy_rest_flattened_error(transport: str = "rest"):
+    client = DataAgentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.set_iam_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_set_iam_policy_rest_unset_required_fields():
+    transport = transports.DataAgentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.set_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "resource",
+                "policy",
+            )
+        )
+    )
+
+
+def test_set_iam_policy_rest_flattened():
+    client = DataAgentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "resource": "projects/sample1/locations/sample2/dataAgents/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            resource="resource_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.set_iam_policy(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1alpha/{resource=projects/*/locations/*/dataAgents/*}:setIamPolicy"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_set_iam_policy_rest_flattened_error(transport: str = "rest"):
+    client = DataAgentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource="resource_value",
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.DataAgentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DataAgentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+ transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataAgentServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataAgentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataAgentServiceGrpcTransport, + transports.DataAgentServiceGrpcAsyncIOTransport, + transports.DataAgentServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DataAgentServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_agents_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_data_agents), "__call__") as call: + call.return_value = data_agent_service.ListDataAgentsResponse() + client.list_data_agents(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.ListDataAgentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_accessible_data_agents_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_accessible_data_agents), "__call__" + ) as call: + call.return_value = data_agent_service.ListAccessibleDataAgentsResponse() + client.list_accessible_data_agents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_agent_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. @@ -6478,6 +8225,29 @@ def test_create_data_agent_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_agent_sync_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent_sync), "__call__" + ) as call: + call.return_value = gcg_data_agent.DataAgent() + client.create_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_update_data_agent_empty_call_grpc(): @@ -6501,6 +8271,29 @@ def test_update_data_agent_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_agent_sync_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + call.return_value = gcg_data_agent.DataAgent() + client.update_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.UpdateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_delete_data_agent_empty_call_grpc(): @@ -6524,6 +8317,29 @@ def test_delete_data_agent_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_agent_sync_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value = None + client.delete_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.DeleteDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_iam_policy_empty_call_grpc(): @@ -6628,12 +8444,126 @@ async def test_list_accessible_data_agents_empty_call_grpc_asyncio(): unreachable=["unreachable_value"], ) ) - await client.list_accessible_data_agents(request=None) + await client.list_accessible_data_agents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_agent_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_data_agent), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + await client.get_data_agent(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.GetDataAgentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_agent_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_data_agent(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_data_agent_sync_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + await client.create_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_agent_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_agent), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_data_agent(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + request_msg = data_agent_service.UpdateDataAgentRequest() assert args[0] == request_msg @@ -6641,28 +8571,30 @@ async def test_list_accessible_data_agents_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_data_agent_empty_call_grpc_asyncio(): +async def test_update_data_agent_sync_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_data_agent), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - data_agent.DataAgent( + gcg_data_agent.DataAgent( name="name_value", display_name="display_name_value", description="description_value", ) ) - await client.get_data_agent(request=None) + await client.update_data_agent_sync(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.GetDataAgentRequest() + request_msg = data_agent_service.UpdateDataAgentRequest() assert args[0] == request_msg @@ -6670,7 +8602,7 @@ async def test_get_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio -async def test_create_data_agent_empty_call_grpc_asyncio(): +async def test_delete_data_agent_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", @@ -6678,18 +8610,18 @@ async def test_create_data_agent_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_data_agent), "__call__" + type(client.transport.delete_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.create_data_agent(request=None) + await client.delete_data_agent(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.CreateDataAgentRequest() + request_msg = data_agent_service.DeleteDataAgentRequest() assert args[0] == request_msg @@ -6697,7 +8629,7 @@ async def test_create_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_data_agent_empty_call_grpc_asyncio(): +async def test_delete_data_agent_sync_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", @@ -6705,18 +8637,16 @@ async def test_update_data_agent_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.delete_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_data_agent(request=None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_agent_sync(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.UpdateDataAgentRequest() + request_msg = data_agent_service.DeleteDataAgentRequest() assert args[0] == request_msg @@ -6724,26 +8654,27 @@ async def test_update_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_delete_data_agent_empty_call_grpc_asyncio(): +async def test_get_iam_policy_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) - await client.delete_data_agent(request=None) + await client.get_iam_policy(request=None) # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.DeleteDataAgentRequest() + request_msg = iam_policy_pb2.GetIamPolicyRequest() assert args[0] == request_msg @@ -6751,14 +8682,14 @@ async def test_delete_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): +async def test_set_iam_policy_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy_pb2.Policy( @@ -6766,59 +8697,303 @@ async def test_get_iam_policy_empty_call_grpc_asyncio(): etag=b"etag_blob", ) ) - await client.get_iam_policy(request=None) + await client.set_iam_policy(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DataAgentServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_data_agents_rest_bad_request( + request_type=data_agent_service.ListDataAgentsRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_agents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.ListDataAgentsRequest, + dict, + ], +) +def test_list_data_agents_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_agent_service.ListDataAgentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_data_agents(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataAgentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_agents_rest_interceptors(null_interceptor): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataAgentServiceRestInterceptor(), + ) + client = DataAgentServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "post_list_data_agents" + ) as post, mock.patch.object( + transports.DataAgentServiceRestInterceptor, + "post_list_data_agents_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "pre_list_data_agents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_agent_service.ListDataAgentsRequest.pb( + data_agent_service.ListDataAgentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_agent_service.ListDataAgentsResponse.to_json( + data_agent_service.ListDataAgentsResponse() + ) + req.return_value.content = return_value + + request = data_agent_service.ListDataAgentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_agent_service.ListDataAgentsResponse() + post_with_metadata.return_value = ( + data_agent_service.ListDataAgentsResponse(), + metadata, + ) + + client.list_data_agents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_accessible_data_agents_rest_bad_request( + request_type=data_agent_service.ListAccessibleDataAgentsRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
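+    # The REST transport surfaces the mocked 400 status as core_exceptions.BadRequest.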
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_accessible_data_agents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.ListAccessibleDataAgentsRequest, + dict, + ], +) +def test_list_accessible_data_agents_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_agent_service.ListAccessibleDataAgentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_accessible_data_agents(request) - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccessibleDataAgentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = DataAgentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_accessible_data_agents_rest_interceptors(null_interceptor): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataAgentServiceRestInterceptor(), ) + client = DataAgentServiceClient(transport=transport) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "post_list_accessible_data_agents" + ) as post, mock.patch.object( + transports.DataAgentServiceRestInterceptor, + "post_list_accessible_data_agents_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "pre_list_accessible_data_agents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_agent_service.ListAccessibleDataAgentsRequest.pb( + data_agent_service.ListAccessibleDataAgentsRequest() ) - await client.set_iam_policy(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_agent_service.ListAccessibleDataAgentsResponse.to_json( + data_agent_service.ListAccessibleDataAgentsResponse() + ) + req.return_value.content = return_value - assert args[0] == request_msg + request = data_agent_service.ListAccessibleDataAgentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_agent_service.ListAccessibleDataAgentsResponse() + post_with_metadata.return_value = ( + data_agent_service.ListAccessibleDataAgentsResponse(), + metadata, + ) + client.list_accessible_data_agents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = DataAgentServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_data_agents_rest_bad_request( - request_type=data_agent_service.ListDataAgentsRequest, +def test_get_data_agent_rest_bad_request( + request_type=data_agent_service.GetDataAgentRequest, ): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6833,31 +9008,32 @@ def test_list_data_agents_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_agents(request) + client.get_data_agent(request) @pytest.mark.parametrize( "request_type", [ - data_agent_service.ListDataAgentsRequest, + data_agent_service.GetDataAgentRequest, dict, ], ) -def test_list_data_agents_rest_call_success(request_type): +def test_get_data_agent_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListDataAgentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", ) # Wrap the value into a proper Response obj @@ -6865,21 +9041,22 @@ def test_list_data_agents_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) + return_value = data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_agents(request) + response = client.get_data_agent(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDataAgentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_agents_rest_interceptors(null_interceptor): +def test_get_data_agent_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6893,18 +9070,17 @@ def test_list_data_agents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_list_data_agents" + transports.DataAgentServiceRestInterceptor, "post_get_data_agent" ) as post, mock.patch.object( - transports.DataAgentServiceRestInterceptor, - "post_list_data_agents_with_metadata", + transports.DataAgentServiceRestInterceptor, "post_get_data_agent_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_list_data_agents" + transports.DataAgentServiceRestInterceptor, "pre_get_data_agent" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.ListDataAgentsRequest.pb( - data_agent_service.ListDataAgentsRequest() + pb_message = data_agent_service.GetDataAgentRequest.pb( + data_agent_service.GetDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -6916,107 +9092,325 @@ def test_list_data_agents_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_agent_service.ListDataAgentsResponse.to_json( - data_agent_service.ListDataAgentsResponse() - ) + return_value = data_agent.DataAgent.to_json(data_agent.DataAgent()) req.return_value.content = return_value - request = data_agent_service.ListDataAgentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_agent_service.ListDataAgentsResponse() - post_with_metadata.return_value = ( - data_agent_service.ListDataAgentsResponse(), - metadata, - ) + request = data_agent_service.GetDataAgentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_agent.DataAgent() + post_with_metadata.return_value = data_agent.DataAgent(), metadata + + client.get_data_agent( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_data_agent_rest_bad_request( + request_type=data_agent_service.CreateDataAgentRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_data_agent(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.CreateDataAgentRequest, + dict, + ], +) +def test_create_data_agent_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["data_agent"] = { + "data_analytics_agent": { + "staging_context": { + "system_instruction": "system_instruction_value", + "datasource_references": { + "bq": { + "table_references": [ + { + "project_id": "project_id_value", + "dataset_id": "dataset_id_value", + "table_id": "table_id_value", + "schema": { + "fields": [ + { + "name": "name_value", + "type_": "type__value", + "description": "description_value", + "mode": "mode_value", + "synonyms": [ + "synonyms_value1", + "synonyms_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "subfields": {}, + "category": "category_value", + "value_format": "value_format_value", + } + ], + "description": "description_value", + "synonyms": ["synonyms_value1", "synonyms_value2"], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "filters": [ + { + "field": "field_value", + "value": "value_value", + "type_": 1, + } + ], + }, + } + ] + }, + "studio": { + "studio_references": [{"datasource_id": "datasource_id_value"}] + }, + "looker": { + "explore_references": [ + { + "looker_instance_uri": "looker_instance_uri_value", + "private_looker_instance_info": { + "looker_instance_id": "looker_instance_id_value", + "service_directory_name": "service_directory_name_value", + }, + "lookml_model": "lookml_model_value", + "explore": "explore_value", + "schema": {}, + } + ], + "credentials": { + "oauth": { + "secret": { + "client_id": "client_id_value", + "client_secret": "client_secret_value", + }, + "token": {"access_token": "access_token_value"}, + } + }, + }, + "alloydb": { + "database_reference": { + "project_id": "project_id_value", + "region": "region_value", + "cluster_id": "cluster_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": { + "context_set_id": "context_set_id_value" + }, + }, + "spanner_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + "cloud_sql_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + }, + "options": { + "chart": {"image": {"no_image": {}, "svg": {}}}, + "analysis": 
{"python": {"enabled": True}}, + "datasource": {"big_query_max_billed_bytes": {"value": 541}}, + }, + "example_queries": [ + { + "sql_query": "sql_query_value", + "natural_language_question": "natural_language_question_value", + } + ], + "looker_golden_queries": [ + { + "natural_language_questions": [ + "natural_language_questions_value1", + "natural_language_questions_value2", + ], + "looker_query": { + "model": "model_value", + "explore": "explore_value", + "fields": ["fields_value1", "fields_value2"], + "filters": [ + {"field": "field_value", "value": "value_value"} + ], + "sorts": ["sorts_value1", "sorts_value2"], + "limit": "limit_value", + }, + } + ], + "glossary_terms": [ + { + "display_name": "display_name_value", + "description": "description_value", + "labels": ["labels_value1", "labels_value2"], + } + ], + "schema_relationships": [ + { + "left_schema_paths": { + "table_fqn": "table_fqn_value", + "paths": ["paths_value1", "paths_value2"], + }, + "right_schema_paths": {}, + "sources": [1], + "confidence_score": 0.1673, + } + ], + }, + "published_context": {}, + "last_published_context": {}, + }, + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "purge_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_agent_service.CreateDataAgentRequest.meta.fields["data_agent"] - client.list_data_agents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_list_accessible_data_agents_rest_bad_request( - request_type=data_agent_service.ListAccessibleDataAgentsRequest, -): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_accessible_data_agents(request) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_agent"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - data_agent_service.ListAccessibleDataAgentsRequest, - dict, - ], -) -def test_list_accessible_data_agents_rest_call_success(request_type): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_agent"][field])): + del request_init["data_agent"][field][i][subfield] + else: + del request_init["data_agent"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListAccessibleDataAgentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_accessible_data_agents(request) + response = client.create_data_agent(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAccessibleDataAgentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_accessible_data_agents_rest_interceptors(null_interceptor): +def test_create_data_agent_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7030,18 +9424,20 @@ def test_list_accessible_data_agents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_list_accessible_data_agents" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DataAgentServiceRestInterceptor, "post_create_data_agent" ) as post, mock.patch.object( transports.DataAgentServiceRestInterceptor, - "post_list_accessible_data_agents_with_metadata", + "post_create_data_agent_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_list_accessible_data_agents" + transports.DataAgentServiceRestInterceptor, "pre_create_data_agent" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.ListAccessibleDataAgentsRequest.pb( - data_agent_service.ListAccessibleDataAgentsRequest() + pb_message = data_agent_service.CreateDataAgentRequest.pb( + data_agent_service.CreateDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -7053,24 +9449,19 @@ def test_list_accessible_data_agents_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_agent_service.ListAccessibleDataAgentsResponse.to_json( - data_agent_service.ListAccessibleDataAgentsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = data_agent_service.ListAccessibleDataAgentsRequest() + request = data_agent_service.CreateDataAgentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = data_agent_service.ListAccessibleDataAgentsResponse() - post_with_metadata.return_value = ( - data_agent_service.ListAccessibleDataAgentsResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_accessible_data_agents( + client.create_data_agent( request, metadata=[ ("key", "val"), @@ -7083,14 +9474,14 @@ def test_list_accessible_data_agents_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_data_agent_rest_bad_request( - request_type=data_agent_service.GetDataAgentRequest, +def test_create_data_agent_sync_rest_bad_request( + request_type=data_agent_service.CreateDataAgentRequest, ): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call 
within the method and fake a BadRequest error. @@ -7105,29 +9496,262 @@ def test_get_data_agent_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_agent(request) + client.create_data_agent_sync(request) @pytest.mark.parametrize( "request_type", [ - data_agent_service.GetDataAgentRequest, + data_agent_service.CreateDataAgentRequest, dict, ], ) -def test_get_data_agent_rest_call_success(request_type): +def test_create_data_agent_sync_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["data_agent"] = { + "data_analytics_agent": { + "staging_context": { + "system_instruction": "system_instruction_value", + "datasource_references": { + "bq": { + "table_references": [ + { + "project_id": "project_id_value", + "dataset_id": "dataset_id_value", + "table_id": "table_id_value", + "schema": { + "fields": [ + { + "name": "name_value", + "type_": "type__value", + "description": "description_value", + "mode": "mode_value", + "synonyms": [ + "synonyms_value1", + "synonyms_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "subfields": {}, + "category": "category_value", + "value_format": "value_format_value", + } + ], + "description": "description_value", + "synonyms": ["synonyms_value1", "synonyms_value2"], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "filters": [ + { + "field": "field_value", + "value": "value_value", + "type_": 1, + } + ], + }, + } + ] + }, + "studio": { + "studio_references": [{"datasource_id": "datasource_id_value"}] + }, + "looker": { + "explore_references": [ + { + "looker_instance_uri": "looker_instance_uri_value", + "private_looker_instance_info": { + "looker_instance_id": "looker_instance_id_value", + "service_directory_name": "service_directory_name_value", + }, + "lookml_model": "lookml_model_value", + "explore": "explore_value", + "schema": {}, + } + ], + "credentials": { + "oauth": { + "secret": { + "client_id": "client_id_value", + "client_secret": "client_secret_value", + }, + "token": {"access_token": "access_token_value"}, + } + }, + }, + "alloydb": { + "database_reference": { + "project_id": "project_id_value", + "region": "region_value", + "cluster_id": "cluster_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": { + "context_set_id": "context_set_id_value" + }, + }, + "spanner_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + "cloud_sql_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + }, + "options": { + "chart": {"image": {"no_image": {}, "svg": {}}}, + 
"analysis": {"python": {"enabled": True}}, + "datasource": {"big_query_max_billed_bytes": {"value": 541}}, + }, + "example_queries": [ + { + "sql_query": "sql_query_value", + "natural_language_question": "natural_language_question_value", + } + ], + "looker_golden_queries": [ + { + "natural_language_questions": [ + "natural_language_questions_value1", + "natural_language_questions_value2", + ], + "looker_query": { + "model": "model_value", + "explore": "explore_value", + "fields": ["fields_value1", "fields_value2"], + "filters": [ + {"field": "field_value", "value": "value_value"} + ], + "sorts": ["sorts_value1", "sorts_value2"], + "limit": "limit_value", + }, + } + ], + "glossary_terms": [ + { + "display_name": "display_name_value", + "description": "description_value", + "labels": ["labels_value1", "labels_value2"], + } + ], + "schema_relationships": [ + { + "left_schema_paths": { + "table_fqn": "table_fqn_value", + "paths": ["paths_value1", "paths_value2"], + }, + "right_schema_paths": {}, + "sources": [1], + "confidence_score": 0.1673, + } + ], + }, + "published_context": {}, + "last_published_context": {}, + }, + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "purge_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_agent_service.CreateDataAgentRequest.meta.fields["data_agent"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_agent"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_agent"][field])): + del request_init["data_agent"][field][i][subfield] + else: + del request_init["data_agent"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = data_agent.DataAgent( + return_value = gcg_data_agent.DataAgent( name="name_value", display_name="display_name_value", description="description_value", @@ -7138,22 +9762,22 @@ def test_get_data_agent_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = data_agent.DataAgent.pb(return_value) + return_value = gcg_data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_agent(request) + response = client.create_data_agent_sync(request) # Establish that the response is the type that we expect. 
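# Aside: the proto-plus/protobuf branch above hinges on one observable
# difference -- raw protobuf message classes expose a DESCRIPTOR attribute,
# while proto-plus wrappers publish their schema through .meta.fields instead.
# A hedged one-liner capturing that check (duration_pb2 is just a convenient
# stock protobuf type for the demonstration):
from google.protobuf import duration_pb2

def is_raw_protobuf(message_cls) -> bool:
    # True for *_pb2 message classes, False for proto-plus wrappers.
    return hasattr(message_cls, "DESCRIPTOR")

assert is_raw_protobuf(duration_pb2.Duration)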
- assert isinstance(response, data_agent.DataAgent) + assert isinstance(response, gcg_data_agent.DataAgent) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_agent_rest_interceptors(null_interceptor): +def test_create_data_agent_sync_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7167,17 +9791,18 @@ def test_get_data_agent_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_get_data_agent" + transports.DataAgentServiceRestInterceptor, "post_create_data_agent_sync" ) as post, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_get_data_agent_with_metadata" + transports.DataAgentServiceRestInterceptor, + "post_create_data_agent_sync_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_get_data_agent" + transports.DataAgentServiceRestInterceptor, "pre_create_data_agent_sync" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.GetDataAgentRequest.pb( - data_agent_service.GetDataAgentRequest() + pb_message = data_agent_service.CreateDataAgentRequest.pb( + data_agent_service.CreateDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -7189,19 +9814,19 @@ def test_get_data_agent_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_agent.DataAgent.to_json(data_agent.DataAgent()) + return_value = gcg_data_agent.DataAgent.to_json(gcg_data_agent.DataAgent()) req.return_value.content = return_value - request = data_agent_service.GetDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = data_agent.DataAgent() - post_with_metadata.return_value = data_agent.DataAgent(), metadata + post.return_value = gcg_data_agent.DataAgent() + post_with_metadata.return_value = gcg_data_agent.DataAgent(), metadata - client.get_data_agent( + client.create_data_agent_sync( request, metadata=[ ("key", "val"), @@ -7214,14 +9839,16 @@ def test_get_data_agent_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_data_agent_rest_bad_request( - request_type=data_agent_service.CreateDataAgentRequest, +def test_update_data_agent_rest_bad_request( + request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "data_agent": {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
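# Aside: the interceptor tests above patch the hook points that the REST
# transport exposes; in application code the same hooks are reached by
# subclassing the interceptor. A minimal sketch -- the base class and hook
# names come from this patch, while the import path and the pass-through
# bodies are assumptions:
from google.cloud.geminidataanalytics_v1beta.services.data_agent_service import (
    transports,
)

class AuditingInterceptor(transports.DataAgentServiceRestInterceptor):
    def pre_create_data_agent_sync(self, request, metadata):
        # Runs before the HTTP call; may rewrite the request or metadata.
        return request, metadata

    def post_create_data_agent_sync(self, response):
        # Runs after a successful call; may rewrite the response.
        return response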
@@ -7236,23 +9863,25 @@ def test_create_data_agent_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_data_agent(request) + client.update_data_agent(request) @pytest.mark.parametrize( "request_type", [ - data_agent_service.CreateDataAgentRequest, + data_agent_service.UpdateDataAgentRequest, dict, ], ) -def test_create_data_agent_rest_call_success(request_type): +def test_update_data_agent_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "data_agent": {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + } request_init["data_agent"] = { "data_analytics_agent": { "staging_context": { @@ -7410,7 +10039,7 @@ def test_create_data_agent_rest_call_success(request_type): "published_context": {}, "last_published_context": {}, }, - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataAgents/sample3", "display_name": "display_name_value", "description": "description_value", "labels": {}, @@ -7424,7 +10053,7 @@ def test_create_data_agent_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = data_agent_service.CreateDataAgentRequest.meta.fields["data_agent"] + test_field = data_agent_service.UpdateDataAgentRequest.meta.fields["data_agent"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -7500,14 +10129,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_data_agent(request) + response = client.update_data_agent(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_data_agent_rest_interceptors(null_interceptor): +def test_update_data_agent_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7523,18 +10152,18 @@ def test_create_data_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_create_data_agent" + transports.DataAgentServiceRestInterceptor, "post_update_data_agent" ) as post, mock.patch.object( transports.DataAgentServiceRestInterceptor, - "post_create_data_agent_with_metadata", + "post_update_data_agent_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_create_data_agent" + transports.DataAgentServiceRestInterceptor, "pre_update_data_agent" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.CreateDataAgentRequest.pb( - data_agent_service.CreateDataAgentRequest() + pb_message = data_agent_service.UpdateDataAgentRequest.pb( + data_agent_service.UpdateDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -7549,7 +10178,7 @@ def test_create_data_agent_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = data_agent_service.CreateDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -7558,7 +10187,7 @@ def test_create_data_agent_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_data_agent( + client.update_data_agent( request, metadata=[ ("key", "val"), @@ -7571,7 +10200,7 @@ def test_create_data_agent_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_data_agent_rest_bad_request( +def test_update_data_agent_sync_rest_bad_request( request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceClient( @@ -7595,7 +10224,7 @@ def test_update_data_agent_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_agent(request) + client.update_data_agent_sync(request) @pytest.mark.parametrize( @@ -7605,7 +10234,7 @@ def test_update_data_agent_rest_bad_request( dict, ], ) -def test_update_data_agent_rest_call_success(request_type): +def test_update_data_agent_sync_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7852,23 +10481,33 @@ def get_message_fields(field): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
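# Aside: the swap just below (operations_pb2.Operation -> gcg_data_agent.DataAgent)
# reflects that the *_sync variants introduced in this patch return the
# resource directly rather than a long-running operation. A hedged usage
# sketch of the two shapes (client construction elided; the flattened
# argument names follow the patch):
def create_both_ways(client, parent, agent, agent_id):
    # LRO path: returns an operation; .result() blocks until it completes.
    op = client.create_data_agent(
        parent=parent, data_agent=agent, data_agent_id=agent_id
    )
    created = op.result()
    # Sync path: returns the DataAgent immediately, no operation to poll.
    created_sync = client.create_data_agent_sync(
        parent=parent, data_agent=agent, data_agent_id=agent_id
    )
    return created, created_sync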
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_agent(request) + response = client.update_data_agent_sync(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_agent_rest_interceptors(null_interceptor): +def test_update_data_agent_sync_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7882,14 +10521,12 @@ def test_update_data_agent_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_update_data_agent" + transports.DataAgentServiceRestInterceptor, "post_update_data_agent_sync" ) as post, mock.patch.object( transports.DataAgentServiceRestInterceptor, - "post_update_data_agent_with_metadata", + "post_update_data_agent_sync_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_update_data_agent" + transports.DataAgentServiceRestInterceptor, "pre_update_data_agent_sync" ) as pre: pre.assert_not_called() post.assert_not_called() @@ -7907,7 +10544,7 @@ def test_update_data_agent_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = gcg_data_agent.DataAgent.to_json(gcg_data_agent.DataAgent()) req.return_value.content = return_value request = data_agent_service.UpdateDataAgentRequest() @@ -7916,10 +10553,10 @@ def test_update_data_agent_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = gcg_data_agent.DataAgent() + post_with_metadata.return_value = gcg_data_agent.DataAgent(), metadata - client.update_data_agent( + client.update_data_agent_sync( request, metadata=[ ("key", "val"), @@ -8056,6 +10693,115 @@ def test_delete_data_agent_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_delete_data_agent_sync_rest_bad_request( + request_type=data_agent_service.DeleteDataAgentRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy 
transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_data_agent_sync(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.DeleteDataAgentRequest, + dict, + ], +) +def test_delete_data_agent_sync_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_data_agent_sync(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_agent_sync_rest_interceptors(null_interceptor): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataAgentServiceRestInterceptor(), + ) + client = DataAgentServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "pre_delete_data_agent_sync" + ) as pre: + pre.assert_not_called() + pb_message = data_agent_service.DeleteDataAgentRequest.pb( + data_agent_service.DeleteDataAgentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = data_agent_service.DeleteDataAgentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_data_agent_sync( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + def test_get_iam_policy_rest_bad_request( request_type=iam_policy_pb2.GetIamPolicyRequest, ): @@ -8763,6 +11509,28 @@ def test_create_data_agent_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_data_agent_sync_empty_call_rest(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent_sync), "__call__" + ) as call: + client.create_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_update_data_agent_empty_call_rest(): @@ -8785,6 +11553,28 @@ def test_update_data_agent_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_agent_sync_empty_call_rest(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + client.update_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.UpdateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_data_agent_empty_call_rest(): @@ -8807,6 +11597,28 @@ def test_delete_data_agent_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_agent_sync_empty_call_rest(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + client.delete_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.DeleteDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_get_iam_policy_empty_call_rest(): @@ -8901,8 +11713,11 @@ def test_data_agent_service_base_transport(): "list_accessible_data_agents", "get_data_agent", "create_data_agent", + "create_data_agent_sync", "update_data_agent", + "update_data_agent_sync", "delete_data_agent", + "delete_data_agent_sync", "get_iam_policy", "set_iam_policy", "get_location", @@ -9186,12 +12001,21 @@ def test_data_agent_service_client_transport_session_collision(transport_name): session1 = client1.transport.create_data_agent._session session2 = client2.transport.create_data_agent._session assert session1 != session2 + session1 = client1.transport.create_data_agent_sync._session + session2 = client2.transport.create_data_agent_sync._session + assert session1 != session2 session1 = client1.transport.update_data_agent._session session2 = client2.transport.update_data_agent._session assert session1 != session2 + session1 = client1.transport.update_data_agent_sync._session + session2 = client2.transport.update_data_agent_sync._session + assert session1 != session2 session1 = client1.transport.delete_data_agent._session session2 = client2.transport.delete_data_agent._session assert session1 != session2 + session1 = client1.transport.delete_data_agent_sync._session + session2 = client2.transport.delete_data_agent_sync._session + assert session1 != session2 session1 = client1.transport.get_iam_policy._session session2 = client2.transport.get_iam_policy._session assert session1 != session2 diff --git a/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1beta/test_data_agent_service.py b/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1beta/test_data_agent_service.py index 14d317413d95..1cc680ea7ec3 100644 --- a/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1beta/test_data_agent_service.py +++ b/packages/google-cloud-geminidataanalytics/tests/unit/gapic/geminidataanalytics_v1beta/test_data_agent_service.py @@ -3189,11 +3189,11 @@ async def test_create_data_agent_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - data_agent_service.UpdateDataAgentRequest, + data_agent_service.CreateDataAgentRequest, dict, ], ) -def test_update_data_agent(request_type, transport: str = "grpc"): +def test_create_data_agent_sync(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3205,23 +3205,30 @@ def test_update_data_agent(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_data_agent(request) + call.return_value = gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + response = client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. 
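# Aside: the session-collision checks above (now extended to the *_sync
# methods) guarantee per-client connection state -- two REST clients must
# never share one requests.Session. The property being pinned down, in
# miniature:
import requests

s1, s2 = requests.Session(), requests.Session()
assert s1 is not s2  # distinct pools, cookies, and adapters per client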
- assert isinstance(response, future.Future) + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" -def test_update_data_agent_non_empty_request_with_auto_populated_field(): +def test_create_data_agent_sync_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -3232,22 +3239,28 @@ def test_update_data_agent_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest( + parent="parent_value", + data_agent_id="data_agent_id_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_data_agent(request=request) + client.create_data_agent_sync(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == data_agent_service.UpdateDataAgentRequest() + assert args[0] == data_agent_service.CreateDataAgentRequest( + parent="parent_value", + data_agent_id="data_agent_id_value", + ) -def test_update_data_agent_use_cached_wrapped_rpc(): +def test_create_data_agent_sync_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3261,7 +3274,10 @@ def test_update_data_agent_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_data_agent in client._transport._wrapped_methods + assert ( + client._transport.create_data_agent_sync + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -3269,20 +3285,15 @@ def test_update_data_agent_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_agent + client._transport.create_data_agent_sync ] = mock_rpc request = {} - client.update_data_agent(request) + client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_agent(request) + client.create_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3290,7 +3301,7 @@ def test_update_data_agent_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_data_agent_async_use_cached_wrapped_rpc( +async def test_create_data_agent_sync_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3307,7 +3318,7 @@ async def test_update_data_agent_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_data_agent + client._client._transport.create_data_agent_sync in client._client._transport._wrapped_methods ) @@ -3315,21 +3326,16 @@ async def test_update_data_agent_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_data_agent + client._client._transport.create_data_agent_sync ] = mock_rpc request = {} - await client.update_data_agent(request) + await client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_agent(request) + await client.create_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3337,9 +3343,9 @@ async def test_update_data_agent_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_data_agent_async( +async def test_create_data_agent_sync_async( transport: str = "grpc_asyncio", - request_type=data_agent_service.UpdateDataAgentRequest, + request_type=data_agent_service.CreateDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -3352,46 +3358,53 @@ async def test_update_data_agent_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) ) - response = await client.update_data_agent(request) + response = await client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. 
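# Aside: the use_cached_wrapped_rpc tests above pin down one invariant --
# retry/timeout/metadata decoration happens once in _prep_wrapped_messages,
# and subsequent calls reuse the cached wrapper instead of re-wrapping. A
# library-independent sketch of that memoisation shape (all names below are
# illustrative):
class TinyTransport:
    def __init__(self):
        self._wrapped_methods = {}

    def wrapped(self, func):
        # Wrap on first use, then serve the cached wrapper.
        if func not in self._wrapped_methods:
            def _wrapped(*args, **kwargs):
                # retries, timeouts, and metadata would be applied here
                return func(*args, **kwargs)
            self._wrapped_methods[func] = _wrapped
        return self._wrapped_methods[func]

transport = TinyTransport()
first = transport.wrapped(print)
assert transport.wrapped(print) is first  # second lookup hits the cache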
- assert isinstance(response, future.Future) + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.asyncio -async def test_update_data_agent_async_from_dict(): - await test_update_data_agent_async(request_type=dict) +async def test_create_data_agent_sync_async_from_dict(): + await test_create_data_agent_sync_async(request_type=dict) -def test_update_data_agent_field_headers(): +def test_create_data_agent_sync_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() - request.data_agent.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_data_agent(request) + call.return_value = gcg_data_agent.DataAgent() + client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3402,30 +3415,30 @@ def test_update_data_agent_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_agent.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_data_agent_field_headers_async(): +async def test_create_data_agent_sync_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = data_agent_service.UpdateDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() - request.data_agent.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + gcg_data_agent.DataAgent() ) - await client.update_data_agent(request) + await client.create_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3436,24 +3449,25 @@ async def test_update_data_agent_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_agent.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_data_agent_flattened(): +def test_create_data_agent_sync_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = gcg_data_agent.DataAgent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_data_agent( + client.create_data_agent_sync( + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3461,13 +3475,16 @@ def test_update_data_agent_flattened(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].data_agent mock_val = gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( @@ -3477,12 +3494,12 @@ def test_update_data_agent_flattened(): ) ) assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].data_agent_id + mock_val = "data_agent_id_value" assert arg == mock_val -def test_update_data_agent_flattened_error(): +def test_create_data_agent_sync_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3490,8 +3507,9 @@ def test_update_data_agent_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_agent( - data_agent_service.UpdateDataAgentRequest(), + client.create_data_agent_sync( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3499,29 +3517,30 @@ def test_update_data_agent_flattened_error(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) @pytest.mark.asyncio -async def test_update_data_agent_flattened_async(): +async def test_create_data_agent_sync_flattened_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.create_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = gcg_data_agent.DataAgent() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + gcg_data_agent.DataAgent() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_data_agent( + response = await client.create_data_agent_sync( + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3529,13 +3548,16 @@ async def test_update_data_agent_flattened_async(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
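# Aside: the flattened tests above assert that keyword arguments are copied
# onto a freshly built request message, field by field. Hedged sketch of the
# equivalence being tested (the request type is from this patch; the import
# path and values are assumptions):
from google.cloud.geminidataanalytics_v1beta.types import data_agent_service

# What client.create_data_agent_sync(parent=..., data_agent_id=...) builds
# internally is, in effect:
request = data_agent_service.CreateDataAgentRequest(
    parent="projects/p/locations/l",
    data_agent_id="agent-1",
)
assert request.parent == "projects/p/locations/l"
assert request.data_agent_id == "agent-1"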
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].data_agent mock_val = gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( @@ -3545,13 +3567,13 @@ async def test_update_data_agent_flattened_async(): ) ) assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].data_agent_id + mock_val = "data_agent_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_data_agent_flattened_error_async(): +async def test_create_data_agent_sync_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3559,8 +3581,9 @@ async def test_update_data_agent_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_data_agent( - data_agent_service.UpdateDataAgentRequest(), + await client.create_data_agent_sync( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", data_agent=gcg_data_agent.DataAgent( data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( staging_context=context.Context( @@ -3568,18 +3591,18 @@ async def test_update_data_agent_flattened_error_async(): ) ) ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_id="data_agent_id_value", ) @pytest.mark.parametrize( "request_type", [ - data_agent_service.DeleteDataAgentRequest, + data_agent_service.UpdateDataAgentRequest, dict, ], ) -def test_delete_data_agent(request_type, transport: str = "grpc"): +def test_update_data_agent(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3591,23 +3614,23 @@ def test_delete_data_agent(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_data_agent(request) + response = client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_data_agent_non_empty_request_with_auto_populated_field(): +def test_update_data_agent_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -3618,26 +3641,22 @@ def test_delete_data_agent_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
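# Aside: AIP-4235, referenced above, says request-id style fields annotated
# for auto-population are filled with a UUID4 when the caller leaves them
# empty, so retried requests stay idempotent. A library-independent sketch of
# the behaviour (the field name is illustrative):
import uuid

def autopopulate(request: dict) -> dict:
    if not request.get("request_id"):
        request["request_id"] = str(uuid.uuid4())
    return request

req = autopopulate({"parent": "projects/p/locations/l"})
assert uuid.UUID(req["request_id"]).version == 4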
- request = data_agent_service.DeleteDataAgentRequest( - name="name_value", - ) + request = data_agent_service.UpdateDataAgentRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_data_agent(request=request) + client.update_data_agent(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == data_agent_service.DeleteDataAgentRequest( - name="name_value", - ) + assert args[0] == data_agent_service.UpdateDataAgentRequest() -def test_delete_data_agent_use_cached_wrapped_rpc(): +def test_update_data_agent_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3651,7 +3670,7 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_data_agent in client._transport._wrapped_methods + assert client._transport.update_data_agent in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -3659,10 +3678,10 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_data_agent + client._transport.update_data_agent ] = mock_rpc request = {} - client.delete_data_agent(request) + client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3672,7 +3691,7 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_data_agent(request) + client.update_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3680,7 +3699,7 @@ def test_delete_data_agent_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_data_agent_async_use_cached_wrapped_rpc( +async def test_update_data_agent_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3697,7 +3716,7 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_data_agent + client._client._transport.update_data_agent in client._client._transport._wrapped_methods ) @@ -3705,11 +3724,11 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_data_agent + client._client._transport.update_data_agent ] = mock_rpc request = {} - await client.delete_data_agent(request) + await client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3719,7 +3738,7 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_data_agent(request) + await client.update_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3727,9 +3746,9 @@ async def test_delete_data_agent_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_data_agent_async( +async def test_update_data_agent_async( transport: str = "grpc_asyncio", - request_type=data_agent_service.DeleteDataAgentRequest, + request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -3742,18 +3761,18 @@ async def test_delete_data_agent_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_data_agent(request) + response = await client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -3761,27 +3780,27 @@ async def test_delete_data_agent_async( @pytest.mark.asyncio -async def test_delete_data_agent_async_from_dict(): - await test_delete_data_agent_async(request_type=dict) +async def test_update_data_agent_async_from_dict(): + await test_update_data_agent_async(request_type=dict) -def test_delete_data_agent_field_headers(): +def test_update_data_agent_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.name = "name_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_data_agent(request) + client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3792,30 +3811,30 @@ def test_delete_data_agent_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "data_agent.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_data_agent_field_headers_async(): +async def test_update_data_agent_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
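# Aside: the field-header assertions above and below check implicit routing --
# any request field that feeds the HTTP/1.1 URI is echoed into the
# x-goog-request-params metadata entry so requests can be routed without
# parsing the body. A hedged sketch of how such a header value is formed (the
# urlencode-based construction is illustrative):
from urllib.parse import urlencode

def routing_header(params: dict) -> tuple:
    return ("x-goog-request-params", urlencode(params))

assert routing_header({"data_agent.name": "name_value"}) == (
    "x-goog-request-params",
    "data_agent.name=name_value",
)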
- request = data_agent_service.DeleteDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.name = "name_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_data_agent(request) + await client.update_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3826,37 +3845,53 @@ async def test_delete_data_agent_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "data_agent.name=name_value", ) in kw["metadata"] -def test_delete_data_agent_flattened(): +def test_update_data_agent_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_data_agent( - name="name_value", + client.update_data_agent( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_data_agent_flattened_error(): +def test_update_data_agent_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3864,21 +3899,28 @@ def test_delete_data_agent_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_data_agent( - data_agent_service.DeleteDataAgentRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_data_agent_flattened_async(): - client = DataAgentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) + client.update_data_agent( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_agent_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" + type(client.transport.update_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3888,21 +3930,37 @@ async def test_delete_data_agent_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_data_agent( - name="name_value", + response = await client.update_data_agent( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_data_agent_flattened_error_async(): +async def test_update_data_agent_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3910,20 +3968,27 @@ async def test_delete_data_agent_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
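# Aside: the flattened_error tests above and below exercise the standard
# GAPIC guard -- a prebuilt request object and flattened keyword arguments
# are mutually exclusive. Minimal sketch of that guard's shape (hedged; not
# the library's literal code):
def call(request=None, *, data_agent=None, update_mask=None):
    has_flattened = any(arg is not None for arg in (data_agent, update_mask))
    if request is not None and has_flattened:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )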
with pytest.raises(ValueError): - await client.delete_data_agent( - data_agent_service.DeleteDataAgentRequest(), - name="name_value", + await client.update_data_agent( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.GetIamPolicyRequest, + data_agent_service.UpdateDataAgentRequest, dict, ], ) -def test_get_iam_policy(request_type, transport: str = "grpc"): +def test_update_data_agent_sync(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3934,27 +3999,31 @@ def test_get_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + call.return_value = gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", ) - response = client.get_iam_policy(request) + response = client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): +def test_update_data_agent_sync_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -3965,24 +4034,22 @@ def test_get_iam_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) + request = data_agent_service.UpdateDataAgentRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_iam_policy(request=request) + client.update_data_agent_sync(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) + assert args[0] == data_agent_service.UpdateDataAgentRequest() -def test_get_iam_policy_use_cached_wrapped_rpc(): +def test_update_data_agent_sync_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3996,21 +4063,26 @@ def test_get_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods + assert ( + client._transport.update_data_agent_sync + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_data_agent_sync + ] = mock_rpc request = {} - client.get_iam_policy(request) + client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_iam_policy(request) + client.update_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4018,7 +4090,7 @@ def test_get_iam_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc( +async def test_update_data_agent_sync_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4035,7 +4107,7 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_iam_policy + client._client._transport.update_data_agent_sync in client._client._transport._wrapped_methods ) @@ -4043,16 +4115,16 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_iam_policy + client._client._transport.update_data_agent_sync ] = mock_rpc request = {} - await client.get_iam_policy(request) + await client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_iam_policy(request) + await client.update_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4060,8 +4132,9 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +async def test_update_data_agent_sync_async( + transport: str = "grpc_asyncio", + request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -4073,48 +4146,54 @@ async def test_get_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
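# ---------------------------------------------------------------------------
# A note on the stubbing pattern used throughout this module: a gRPC
# multicallable is itself callable, so patching "__call__" on its *type*
# intercepts the RPC at the transport boundary without any network traffic.
# Roughly, with the names these tests already import:
#
#     with mock.patch.object(
#         type(client.transport.update_data_agent_sync), "__call__"
#     ) as call:
#         call.return_value = gcg_data_agent.DataAgent()
#         client.update_data_agent_sync(request)
#         call.assert_called()
# ---------------------------------------------------------------------------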
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", + gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", ) ) - response = await client.get_iam_policy(request) + response = await client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) +async def test_update_data_agent_sync_async_from_dict(): + await test_update_data_agent_sync_async(request_type=dict) -def test_get_iam_policy_field_headers(): +def test_update_data_agent_sync_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.resource = "resource_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + call.return_value = gcg_data_agent.DataAgent() + client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4125,26 +4204,30 @@ def test_get_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "data_agent.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): +async def test_update_data_agent_sync_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = data_agent_service.UpdateDataAgentRequest() - request.resource = "resource_value" + request.data_agent.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
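# ---------------------------------------------------------------------------
# The async variants wrap every stubbed return value in
# grpc_helpers_async.FakeUnaryUnaryCall because an async gRPC stub returns
# an awaitable call object rather than the response itself. As used in
# these tests, awaiting the fake yields the wrapped message:
#
#     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
#         gcg_data_agent.DataAgent(name="name_value")
#     )
#     response = await client.update_data_agent_sync(request)
#     assert response.name == "name_value"
# ---------------------------------------------------------------------------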
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcg_data_agent.DataAgent() + ) + await client.update_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4155,52 +4238,53 @@ async def test_get_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "data_agent.name=name_value", ) in kw["metadata"] -def test_get_iam_policy_from_dict_foreign(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): +def test_update_data_agent_sync_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = gcg_data_agent.DataAgent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_iam_policy( - resource="resource_value", + client.update_data_agent_sync( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_iam_policy_flattened_error(): +def test_update_data_agent_sync_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4208,41 +4292,68 @@ def test_get_iam_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", + client.update_data_agent_sync( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): +async def test_update_data_agent_sync_flattened_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = gcg_data_agent.DataAgent() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcg_data_agent.DataAgent() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource="resource_value", + response = await client.update_data_agent_sync( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].data_agent + mock_val = gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): +async def test_update_data_agent_sync_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4250,20 +4361,27 @@ async def test_get_iam_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
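# ---------------------------------------------------------------------------
# Note on the *_flattened_async tests here and below: the first
# "call.return_value = ..." assignment is immediately overwritten by the
# FakeUnaryUnaryCall assignment that follows it, so only the awaitable
# wrapper ever reaches the async client. The duplicated assignment appears
# to be a harmless artifact of the test generator's template.
# ---------------------------------------------------------------------------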
with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", - ) - - -@pytest.mark.parametrize( - "request_type", + await client.update_data_agent_sync( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + data_agent_service.DeleteDataAgentRequest, dict, ], ) -def test_set_iam_policy(request_type, transport: str = "grpc"): +def test_delete_data_agent(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4274,27 +4392,24 @@ def test_set_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - response = client.set_iam_policy(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): +def test_delete_data_agent_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( @@ -4305,24 +4420,26 @@ def test_set_iam_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", + request = data_agent_service.DeleteDataAgentRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.set_iam_policy(request=request) + client.delete_data_agent(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", + assert args[0] == data_agent_service.DeleteDataAgentRequest( + name="name_value", ) -def test_set_iam_policy_use_cached_wrapped_rpc(): +def test_delete_data_agent_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4336,21 +4453,28 @@ def test_set_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods + assert client._transport.delete_data_agent in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_data_agent + ] = mock_rpc request = {} - client.set_iam_policy(request) + client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.set_iam_policy(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4358,7 +4482,7 @@ def test_set_iam_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc( +async def test_delete_data_agent_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4375,7 +4499,7 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.set_iam_policy + client._client._transport.delete_data_agent in client._client._transport._wrapped_methods ) @@ -4383,16 +4507,21 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.set_iam_policy + client._client._transport.delete_data_agent ] = mock_rpc request = {} - await client.set_iam_policy(request) + await client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.set_iam_policy(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4400,8 +4529,9 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +async def test_delete_data_agent_async( + transport: str = "grpc_asyncio", + request_type=data_agent_service.DeleteDataAgentRequest, ): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -4413,48 +4543,47 @@ async def test_set_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.set_iam_policy(request) + response = await client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) +async def test_delete_data_agent_async_from_dict(): + await test_delete_data_agent_async(request_type=dict) -def test_set_iam_policy_field_headers(): +def test_delete_data_agent_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4465,26 +4594,30 @@ def test_set_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): +async def test_delete_data_agent_field_headers_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
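# ---------------------------------------------------------------------------
# The field-header tests below assert on the "x-goog-request-params"
# metadata entry. The generated clients derive it from the routed request
# fields; a sketch using api_core's helper (assuming
# google.api_core.gapic_v1.routing_header, which the generated transports
# rely on):
#
#     from google.api_core.gapic_v1 import routing_header
#     routing_header.to_grpc_metadata([("name", "name_value")])
#     # -> ("x-goog-request-params", "name=name_value")
# ---------------------------------------------------------------------------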
- request = iam_policy_pb2.SetIamPolicyRequest() + request = data_agent_service.DeleteDataAgentRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_data_agent(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4495,53 +4628,37 @@ async def test_set_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] -def test_set_iam_policy_from_dict_foreign(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): +def test_delete_data_agent_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.set_iam_policy( - resource="resource_value", + client.delete_data_agent( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_set_iam_policy_flattened_error(): +def test_delete_data_agent_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4549,41 +4666,45 @@ def test_set_iam_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + client.delete_data_agent( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): +async def test_delete_data_agent_flattened_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
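# ---------------------------------------------------------------------------
# delete_data_agent is a long-running operation, which is why its tests
# stub operations_pb2.Operation and assert isinstance(response,
# future.Future): the client wraps the raw Operation in an api_core polling
# future. A rough sketch of that wrapping (assuming
# google.api_core.operation.from_gapic, the helper generated clients use
# for LROs; the names below are illustrative):
#
#     from google.api_core import operation
#     lro = operation.from_gapic(raw_operation, operations_client, result_type)
#     lro.result(timeout=300)  # polls until the operation completes
# ---------------------------------------------------------------------------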
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_agent), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource="resource_value", + response = await client.delete_data_agent( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): +async def test_delete_data_agent_flattened_error_async(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4591,283 +4712,84 @@ async def test_set_iam_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + await client.delete_data_agent( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) -def test_list_data_agents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.DeleteDataAgentRequest, + dict, + ], +) +def test_delete_data_agent_sync(request_type, transport: str = "grpc"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Ensure method has been cached - assert client._transport.list_data_agents in client._transport._wrapped_methods + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_data_agents - ] = mock_rpc - - request = {} - client.list_data_agents(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_data_agents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_data_agents_rest_required_fields( - request_type=data_agent_service.ListDataAgentsRequest, -): - transport_class = transports.DataAgentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_data_agents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_data_agents._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "show_deleted", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListDataAgentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
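# ---------------------------------------------------------------------------
# On the transcode() mocking in the REST required-fields tests: in the real
# REST transport, path_template.transcode maps a request proto onto one of
# the method's HTTP rules, producing roughly
#
#     {"uri": "/v1beta/projects/p/locations/l/dataAgents",  # illustrative
#      "method": "get",
#      "query_params": <fields not bound into the URI or body>}
#
# Faking a bare "v1/sample_method" URI, as these tests do, forces every
# request field into query_params so the assertions can inspect them all.
# ---------------------------------------------------------------------------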
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_data_agents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_data_agents_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_data_agents._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "showDeleted", - ) - ) - & set(("parent",)) - ) - - -def test_list_data_agents_rest_flattened(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListDataAgentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_data_agents(**mock_args) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_agent_service.DeleteDataAgentRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{parent=projects/*/locations/*}/dataAgents" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. + assert response is None -def test_list_data_agents_rest_flattened_error(transport: str = "rest"): +def test_delete_data_agent_sync_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
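# ---------------------------------------------------------------------------
# The pager test further below stitches several ListDataAgentsResponse
# pages together. The paging contract it relies on: each response carries
# next_page_token, and iteration stops once that token is empty. A minimal,
# hypothetical sketch of the loop (not the library's implementation):

def _sketch_iterate_pages(fetch_page, request):
    token = ""
    while True:
        page = fetch_page(request, page_token=token)  # one RPC per page
        yield page
        token = page.next_page_token
        if not token:  # "" on the final page ends iteration
            break
# ---------------------------------------------------------------------------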
- with pytest.raises(ValueError): - client.list_data_agents( - data_agent_service.ListDataAgentsRequest(), - parent="parent_value", - ) - - -def test_list_data_agents_rest_pager(transport: str = "rest"): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_agent_service.DeleteDataAgentRequest( + name="name_value", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - data_agent_service.ListDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - next_page_token="abc", - ), - data_agent_service.ListDataAgentsResponse( - data_agents=[], - next_page_token="def", - ), - data_agent_service.ListDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - ], - next_page_token="ghi", - ), - data_agent_service.ListDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - ), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - data_agent_service.ListDataAgentsResponse.to_json(x) for x in response + client.delete_data_agent_sync(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_agent_service.DeleteDataAgentRequest( + name="name_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_data_agents(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_agent.DataAgent) for i in results) - - pages = list(client.list_data_agents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): +def test_delete_data_agent_sync_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -4876,7 +4798,7 @@ def test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_accessible_data_agents + client._transport.delete_data_agent_sync in client._transport._wrapped_methods ) @@ -4886,262 +4808,320 @@ def 
test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_accessible_data_agents + client._transport.delete_data_agent_sync ] = mock_rpc - request = {} - client.list_accessible_data_agents(request) + client.delete_data_agent_sync(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_accessible_data_agents(request) + client.delete_data_agent_sync(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_accessible_data_agents_rest_required_fields( - request_type=data_agent_service.ListAccessibleDataAgentsRequest, +@pytest.mark.asyncio +async def test_delete_data_agent_sync_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DataAgentServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.delete_data_agent_sync + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_agent_sync + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.delete_data_agent_sync(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
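# ---------------------------------------------------------------------------
# Reading the set algebra in the assertion below: subtracting the method's
# optional query parameters from unset_fields and requiring the difference
# to be empty appears to assert that every field still unset at this point
# must be an optional query parameter; required fields such as "parent"
# were already populated earlier in the test.
# ---------------------------------------------------------------------------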
- assert not set(unset_fields) - set( - ( - "creator_filter", - "filter", - "order_by", - "page_size", - "page_token", - "show_deleted", - ) + await client.delete_data_agent_sync(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_agent_sync_async( + transport: str = "grpc_asyncio", + request_type=data_agent_service.DeleteDataAgentRequest, +): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_agent_sync(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_agent_service.DeleteDataAgentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +@pytest.mark.asyncio +async def test_delete_data_agent_sync_async_from_dict(): + await test_delete_data_agent_sync_async(request_type=dict) + + +def test_delete_data_agent_sync_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListAccessibleDataAgentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_agent_service.DeleteDataAgentRequest() - # Convert return value to protobuf type - return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value = None + client.delete_data_agent_sync(request) - response = client.list_accessible_data_agents(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_accessible_data_agents_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_data_agent_sync_field_headers_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.list_accessible_data_agents._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "creatorFilter", - "filter", - "orderBy", - "pageSize", - "pageToken", - "showDeleted", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_agent_service.DeleteDataAgentRequest() + request.name = "name_value" -def test_list_accessible_data_agents_rest_flattened(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_agent_sync(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListAccessibleDataAgentsResponse() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +def test_delete_data_agent_sync_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.list_accessible_data_agents(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_agent_sync( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{parent=projects/*/locations/*}/dataAgents:listAccessible" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_accessible_data_agents_rest_flattened_error(transport: str = "rest"): +def test_delete_data_agent_sync_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_accessible_data_agents( - data_agent_service.ListAccessibleDataAgentsRequest(), - parent="parent_value", + client.delete_data_agent_sync( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) -def test_list_accessible_data_agents_rest_pager(transport: str = "rest"): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_delete_data_agent_sync_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - next_page_token="abc", - ), - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[], - next_page_token="def", - ), - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - ], - next_page_token="ghi", - ), - data_agent_service.ListAccessibleDataAgentsResponse( - data_agents=[ - data_agent.DataAgent(), - data_agent.DataAgent(), - ], - ), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_agent_sync( + name="name_value", ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple( - data_agent_service.ListAccessibleDataAgentsResponse.to_json(x) - for x in response + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_data_agent_sync_flattened_error_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_agent_sync( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_accessible_data_agents(request=sample_request) +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_agent.DataAgent) for i in results) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - pages = list(client.list_accessible_data_agents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" -def test_get_data_agent_rest_use_cached_wrapped_rpc(): +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + +def test_get_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -5149,179 +5129,339 @@ def test_get_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_data_agent in client._transport._wrapped_methods + assert client._transport.get_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_data_agent] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc request = {} - client.get_data_agent(request) + client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_agent(request) + client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_data_agent_rest_required_fields( - request_type=data_agent_service.GetDataAgentRequest, +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DataAgentServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_iam_policy + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_data_agent._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_iam_policy + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.get_iam_policy(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_data_agent._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.get_iam_policy(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = data_agent.DataAgent() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value = Response() - response_value.status_code = 200 + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Convert return value to protobuf type - return_value = data_agent.DataAgent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request - response = client.get_data_agent(request) + # Establish that the response is the type that we expect. 
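+    # (Awaiting the FakeUnaryUnaryCall resolves to the wrapped Policy message,
+    # which is what the field assertions below inspect.)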
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) -def test_get_data_agent_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_get_iam_policy_field_headers(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_data_agent._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) -def test_get_data_agent_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = data_agent.DataAgent() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataAgents/sample3" - } +def test_get_iam_policy_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = data_agent.DataAgent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val - client.get_data_agent(**mock_args) + +def test_get_iam_policy_flattened_error(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource="resource_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataAgents/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
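+    # (This validation happens client-side, so ValueError is raised before any
+    # RPC is attempted.)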
+ with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", ) -def test_get_data_agent_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_agent( - data_agent_service.GetDataAgentRequest(), - name="name_value", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request -def test_create_data_agent_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.set_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + +def test_set_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -5329,209 +5469,268 @@ def test_create_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_data_agent in client._transport._wrapped_methods + assert client._transport.set_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_data_agent - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc request = {} - client.create_data_agent(request) + client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() - client.create_data_agent(request) + # Ensure method has been cached + assert ( + client._client._transport.set_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_iam_policy + ] = mock_rpc + + request = {} + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
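+        # (call_count accumulates on the AsyncMock installed in _wrapped_methods
+        # above, so a second successful call proves the cached wrapper is reused
+        # rather than rebuilt.)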
+ assert mock_rpc.call_count == 1 + + await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_data_agent_rest_required_fields( - request_type=data_agent_service.CreateDataAgentRequest, +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest ): - transport_class = transports.DataAgentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_data_agent._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request - jsonified_request["parent"] = "parent_value" + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_data_agent._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "data_agent_id", - "request_id", - ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + +def test_set_iam_policy_field_headers(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.resource = "resource_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) - response = client.create_data_agent(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] -def test_create_data_agent_rest_unset_required_fields(): - transport = transports.DataAgentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_data_agent._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "dataAgentId", - "requestId", - ) - ) - & set( - ( - "parent", - "dataAgent", - ) - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource_value" -def test_create_data_agent_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} +def test_set_iam_policy_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) - ), - data_agent_id="data_agent_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource="resource_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_data_agent(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{parent=projects/*/locations/*}/dataAgents" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val -def test_create_data_agent_rest_flattened_error(transport: str = "rest"): +def test_set_iam_policy_flattened_error(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_data_agent( - data_agent_service.CreateDataAgentRequest(), - parent="parent_value", - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) - ), - data_agent_id="data_agent_id_value", + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", ) -def test_update_data_agent_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_list_data_agents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5545,7 +5744,7 @@ def test_update_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_data_agent in client._transport._wrapped_methods + assert client._transport.list_data_agents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5553,32 +5752,29 @@ def test_update_data_agent_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_agent + client._transport.list_data_agents ] = mock_rpc request = {} - client.update_data_agent(request) + client.list_data_agents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_agent(request) + client.list_data_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_data_agent_rest_required_fields( - request_type=data_agent_service.UpdateDataAgentRequest, +def test_list_data_agents_rest_required_fields( + request_type=data_agent_service.ListDataAgentsRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5589,24 +5785,31 @@ def test_update_data_agent_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_agent._get_unset_required_fields(jsonified_request) + ).list_data_agents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_agent._get_unset_required_fields(jsonified_request) + ).list_data_agents._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", + "show_deleted", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5615,7 +5818,7 @@ def test_update_data_agent_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListDataAgentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5627,45 +5830,50 @@ def test_update_data_agent_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_agent(request) + response = client.list_data_agents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_data_agent_rest_unset_required_fields(): +def test_list_data_agents_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_data_agent._get_unset_required_fields({}) + unset_fields = transport.list_data_agents._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", + "showDeleted", ) ) - & set(("dataAgent",)) + & set(("parent",)) ) -def test_update_data_agent_rest_flattened(): +def test_list_data_agents_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5674,50 +5882,41 @@ def test_update_data_agent_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListDataAgentsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "data_agent": { - "name": "projects/sample1/locations/sample2/dataAgents/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_agent(**mock_args) + client.list_data_agents(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{data_agent.name=projects/*/locations/*/dataAgents/*}" + "%s/v1beta/{parent=projects/*/locations/*}/dataAgents" % client.transport._host, args[1], ) -def test_update_data_agent_rest_flattened_error(transport: str = "rest"): +def test_list_data_agents_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5726,20 +5925,76 @@ def test_update_data_agent_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_agent( - data_agent_service.UpdateDataAgentRequest(), - data_agent=gcg_data_agent.DataAgent( - data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( - staging_context=context.Context( - system_instruction="system_instruction_value" - ) - ) + client.list_data_agents( + data_agent_service.ListDataAgentsRequest(), + parent="parent_value", + ) + + +def test_list_data_agents_rest_pager(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_agent_service.ListDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + next_page_token="abc", ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + data_agent_service.ListDataAgentsResponse( + data_agents=[], + next_page_token="def", + ), + data_agent_service.ListDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + ], + next_page_token="ghi", + ), + data_agent_service.ListDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + data_agent_service.ListDataAgentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_delete_data_agent_rest_use_cached_wrapped_rpc(): + pager = client.list_data_agents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_agent.DataAgent) for i in results) + + pages = list(client.list_data_agents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_accessible_data_agents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5753,7 +6008,10 @@ def test_delete_data_agent_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert 
client._transport.delete_data_agent in client._transport._wrapped_methods + assert ( + client._transport.list_accessible_data_agents + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5761,33 +6019,29 @@ def test_delete_data_agent_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_data_agent + client._transport.list_accessible_data_agents ] = mock_rpc request = {} - client.delete_data_agent(request) + client.list_accessible_data_agents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_agent(request) + client.list_accessible_data_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_data_agent_rest_required_fields( - request_type=data_agent_service.DeleteDataAgentRequest, +def test_list_accessible_data_agents_rest_required_fields( + request_type=data_agent_service.ListAccessibleDataAgentsRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5798,23 +6052,32 @@ def test_delete_data_agent_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_data_agent._get_unset_required_fields(jsonified_request) + ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_data_agent._get_unset_required_fields(jsonified_request) + ).list_accessible_data_agents._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "creator_filter", + "filter", + "order_by", + "page_size", + "page_token", + "show_deleted", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5823,7 +6086,7 @@ def test_delete_data_agent_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListAccessibleDataAgentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5835,36 +6098,53 @@ def test_delete_data_agent_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_data_agent(request) + response = client.list_accessible_data_agents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_data_agent_rest_unset_required_fields(): +def test_list_accessible_data_agents_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_data_agent._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_accessible_data_agents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "creatorFilter", + "filter", + "orderBy", + "pageSize", + "pageToken", + "showDeleted", + ) + ) + & set(("parent",)) + ) -def test_delete_data_agent_rest_flattened(): +def test_list_accessible_data_agents_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5873,41 +6153,43 @@ def test_delete_data_agent_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = data_agent_service.ListAccessibleDataAgentsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataAgents/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_data_agent(**mock_args) + client.list_accessible_data_agents(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
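         # (path_template.validate confirms the outgoing request URI matches the
         # http rule pattern for this method.)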
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataAgents/*}" + "%s/v1beta/{parent=projects/*/locations/*}/dataAgents:listAccessible" % client.transport._host, args[1], ) -def test_delete_data_agent_rest_flattened_error(transport: str = "rest"): +def test_list_accessible_data_agents_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5916,13 +6198,77 @@ def test_delete_data_agent_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_data_agent( - data_agent_service.DeleteDataAgentRequest(), - name="name_value", + client.list_accessible_data_agents( + data_agent_service.ListAccessibleDataAgentsRequest(), + parent="parent_value", ) -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): +def test_list_accessible_data_agents_rest_pager(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + next_page_token="abc", + ), + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[], + next_page_token="def", + ), + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + ], + next_page_token="ghi", + ), + data_agent_service.ListAccessibleDataAgentsResponse( + data_agents=[ + data_agent.DataAgent(), + data_agent.DataAgent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + data_agent_service.ListAccessibleDataAgentsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_accessible_data_agents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_agent.DataAgent) for i in results) + + pages = list(client.list_accessible_data_agents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_data_agent_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5936,37 +6282,37 @@ def test_get_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods + assert client._transport.get_data_agent in client._transport._wrapped_methods 
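+        # (Membership in _wrapped_methods shows _prep_wrapped_messages built
+        # this wrapper once at client construction, instead of per call.)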
# Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_agent] = mock_rpc request = {} - client.get_iam_policy(request) + client.get_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_iam_policy(request) + client.get_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.GetIamPolicyRequest, +def test_get_data_agent_rest_required_fields( + request_type=data_agent_service.GetDataAgentRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} - request_init["resource"] = "" + request_init["name"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -5975,21 +6321,21 @@ def test_get_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) + ).get_data_agent._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) + ).get_data_agent._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5998,7 +6344,7 @@ def test_get_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = data_agent.DataAgent() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6007,41 +6353,42 @@ def test_get_iam_policy_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
- pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_iam_policy(request) + response = client.get_data_agent(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_iam_policy_rest_unset_required_fields(): +def test_get_data_agent_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource",))) + unset_fields = transport.get_data_agent._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_iam_policy_rest_flattened(): +def test_get_data_agent_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6050,41 +6397,43 @@ def test_get_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = data_agent.DataAgent() # get arguments that satisfy an http rule for this method sample_request = { - "resource": "projects/sample1/locations/sample2/dataAgents/sample3" + "name": "projects/sample1/locations/sample2/dataAgents/sample3" } # get truthy value for each flattened field mock_args = dict( - resource="resource_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(**mock_args) + client.get_data_agent(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{resource=projects/*/locations/*/dataAgents/*}:getIamPolicy" + "%s/v1beta/{name=projects/*/locations/*/dataAgents/*}" % client.transport._host, args[1], ) -def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): +def test_get_data_agent_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6093,13 +6442,13 @@ def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", + client.get_data_agent( + data_agent_service.GetDataAgentRequest(), + name="name_value", ) -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): +def test_create_data_agent_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6113,37 +6462,43 @@ def test_set_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods + assert client._transport.create_data_agent in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_data_agent + ] = mock_rpc request = {} - client.set_iam_policy(request) + client.create_data_agent(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.set_iam_policy(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_set_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.SetIamPolicyRequest, +def test_create_data_agent_rest_required_fields( + request_type=data_agent_service.CreateDataAgentRequest, ): transport_class = transports.DataAgentServiceRestTransport request_init = {} - request_init["resource"] = "" + request_init["parent"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -6152,21 +6507,28 @@ def test_set_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).create_data_agent._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).create_data_agent._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
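# A standalone sketch of the caching contract that the
# *_use_cached_wrapped_rpc tests in this diff exercise: the client is
# expected to wrap each RPC exactly once, at construction time (the role
# played by _prep_wrapped_messages), and to reuse that wrapper on every
# later call. FakeClient below is hypothetical, not part of the generated
# surface.
from unittest import mock


class FakeClient:
    def __init__(self, wrap_fn):
        # Wrap every method once, up front.
        self._wrapped = {"get_thing": wrap_fn(lambda request: request)}

    def get_thing(self, request):
        # Reuse the cached wrapper; never re-wrap per call.
        return self._wrapped["get_thing"](request)


wrap_fn = mock.Mock(side_effect=lambda fn: fn)
client = FakeClient(wrap_fn)
assert wrap_fn.call_count == 1  # wrapped once at construction

wrap_fn.reset_mock()
client.get_thing({})
client.get_thing({})
assert wrap_fn.call_count == 0  # no new wrapper created per call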
+ assert not set(unset_fields) - set( + ( + "data_agent_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6175,7 +6537,7 @@ def test_set_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6184,7 +6546,7 @@ def test_set_iam_policy_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", "method": "post", @@ -6195,38 +6557,42 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) + response = client.create_data_agent(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_iam_policy_rest_unset_required_fields(): +def test_create_data_agent_rest_unset_required_fields(): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + unset_fields = transport.create_data_agent._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "dataAgentId", + "requestId", + ) + ) & set( ( - "resource", - "policy", + "parent", + "dataAgent", ) ) ) -def test_set_iam_policy_rest_flattened(): +def test_create_data_agent_rest_flattened(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6235,16 +6601,22 @@ def test_set_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "resource": "projects/sample1/locations/sample2/dataAgents/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - resource="resource_value", + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", ) mock_args.update(sample_request) @@ -6256,20 +6628,20 @@ def test_set_iam_policy_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(**mock_args) + client.create_data_agent(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{resource=projects/*/locations/*/dataAgents/*}:setIamPolicy" + "%s/v1beta/{parent=projects/*/locations/*}/dataAgents" % client.transport._host, args[1], ) -def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): +def test_create_data_agent_rest_flattened_error(transport: str = "rest"): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6278,168 +6650,1543 @@ def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + client.create_data_agent( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_create_data_agent_sync_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + transport="rest", ) - # It is an error to provide an api_key and a transport instance. 
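# The *_flattened_error tests pin down a general GAPIC convention: an RPC
# method accepts either a fully-formed request object or flattened keyword
# arguments, never both. A rough sketch of that guard, with hypothetical
# names and a paraphrased error message:
def call_rpc(request=None, *, name=None):
    if request is not None and name is not None:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    return request if request is not None else {"name": name}


assert call_rpc(request={"name": "n"}) == {"name": "n"}
assert call_rpc(name="name_value") == {"name": "name_value"}
try:
    call_rpc(request={}, name="name_value")
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError when both forms are given")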
- transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options=options, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Ensure method has been cached + assert ( + client._transport.create_data_agent_sync + in client._transport._wrapped_methods ) - # It is an error to provide scopes and a transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataAgentServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.create_data_agent_sync + ] = mock_rpc + request = {} + client.create_data_agent_sync(request) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataAgentServiceClient(transport=transport) - assert client.transport is transport + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.create_data_agent_sync(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataAgentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - transport = transports.DataAgentServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel +def test_create_data_agent_sync_rest_required_fields( + request_type=data_agent_service.CreateDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataAgentServiceGrpcTransport, - transports.DataAgentServiceGrpcAsyncIOTransport, - transports.DataAgentServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -def test_transport_kind_grpc(): - transport = DataAgentServiceClient.get_transport_class("grpc")( + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).create_data_agent_sync._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -def test_initialize_client_w_grpc(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_agent_sync._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "data_agent_id", + "request_id", + ) ) - assert client is not None + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_agents_empty_call_grpc(): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_data_agents), "__call__") as call: - call.return_value = data_agent_service.ListDataAgentsResponse() - client.list_data_agents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.ListDataAgentsRequest() - - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = gcg_data_agent.DataAgent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_accessible_data_agents_empty_call_grpc(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_accessible_data_agents), "__call__" - ) as call: - call.return_value = data_agent_service.ListAccessibleDataAgentsResponse() - client.list_accessible_data_agents(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + response = client.create_data_agent_sync(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_agent_empty_call_grpc(): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_create_data_agent_sync_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_data_agent_sync._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "dataAgentId", + "requestId", + ) + ) + & set( + ( + "parent", + "dataAgent", + ) + ) + ) + + +def test_create_data_agent_sync_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcg_data_agent.DataAgent() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_agent_sync(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
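# Every REST test in this file fakes the HTTP layer the same way: build a
# requests.Response by hand, set the status code, and stuff the serialized
# payload straight into ._content instead of performing real I/O. A minimal
# self-contained version of that trick (sample payload made up):
import json

from requests import Response

response_value = Response()
response_value.status_code = 200
response_value._content = json.dumps({"name": "agents/sample"}).encode("UTF-8")

assert response_value.ok
assert response_value.json() == {"name": "agents/sample"}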
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/dataAgents:createSync" + % client.transport._host, + args[1], + ) + + +def test_create_data_agent_sync_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_agent_sync( + data_agent_service.CreateDataAgentRequest(), + parent="parent_value", + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + data_agent_id="data_agent_id_value", + ) + + +def test_update_data_agent_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_agent in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_agent + ] = mock_rpc + + request = {} + client.update_data_agent(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_agent(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_agent_rest_required_fields( + request_type=data_agent_service.UpdateDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_agent(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_agent_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_agent._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("dataAgent",)) + ) + + +def test_update_data_agent_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_agent": { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_agent(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{data_agent.name=projects/*/locations/*/dataAgents/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_agent_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_agent( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_agent_sync_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_agent_sync + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_agent_sync + ] = mock_rpc + + request = {} + client.update_data_agent_sync(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_data_agent_sync(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_agent_sync_rest_required_fields( + request_type=data_agent_service.UpdateDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent_sync._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_agent_sync._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = gcg_data_agent.DataAgent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_agent_sync(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_agent_sync_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_agent_sync._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("dataAgent",)) + ) + + +def test_update_data_agent_sync_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcg_data_agent.DataAgent() + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_agent": { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_agent_sync(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{data_agent.name=projects/*/locations/*/dataAgents/*}:updateSync" + % client.transport._host, + args[1], + ) + + +def test_update_data_agent_sync_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_agent_sync( + data_agent_service.UpdateDataAgentRequest(), + data_agent=gcg_data_agent.DataAgent( + data_analytics_agent=data_analytics_agent.DataAnalyticsAgent( + staging_context=context.Context( + system_instruction="system_instruction_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_data_agent_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_agent in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_agent + ] = mock_rpc + + request = {} + client.delete_data_agent(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_agent(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_agent_rest_required_fields( + request_type=data_agent_service.DeleteDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
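# The path_template.validate assertions above check the URI the mocked
# session received against the method's HTTP-rule template, where each "*"
# matches exactly one path segment. A small illustration using the
# update_data_agent_sync template from the test above (host and sample
# paths are made up):
from google.api_core import path_template

host = "https://example.googleapis.com"
assert path_template.validate(
    "%s/v1beta/{data_agent.name=projects/*/locations/*/dataAgents/*}:updateSync"
    % host,
    "%s/v1beta/projects/p/locations/l/dataAgents/d:updateSync" % host,
)
# A path missing the locations/* segment no longer matches:
assert not path_template.validate(
    "%s/v1beta/{data_agent.name=projects/*/locations/*/dataAgents/*}:updateSync"
    % host,
    "%s/v1beta/projects/p/dataAgents/d:updateSync" % host,
)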
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_agent(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_agent_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_agent._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_data_agent_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_agent(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataAgents/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_data_agent_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_agent( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", + ) + + +def test_delete_data_agent_sync_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_agent_sync + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_agent_sync + ] = mock_rpc + + request = {} + client.delete_data_agent_sync(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_data_agent_sync(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_agent_sync_rest_required_fields( + request_type=data_agent_service.DeleteDataAgentRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent_sync._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_agent_sync._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_agent_sync(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_agent_sync_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_agent_sync._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_data_agent_sync_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataAgents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_agent_sync(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataAgents/*}:deleteSync" + % client.transport._host, + args[1], + ) + + +def test_delete_data_agent_sync_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
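# The set algebra in the *_unset_required_fields assertions is easy to
# misread: it intersects the parameters that may travel in the query string
# (e.g. {"requestId"}) with the fields the proto marks required (e.g.
# {"name"}). For every method shown here that intersection is empty, so each
# assertion reduces to "no required field is reported unset":
query_string_params = {"requestId"}  # runtime-populated parameters
required_fields = {"name"}           # required by the proto definition
assert (query_string_params & required_fields) == set()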
+ with pytest.raises(ValueError): + client.delete_data_agent_sync( + data_agent_service.DeleteDataAgentRequest(), + name="name_value", + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource",))) + + +def test_get_iam_policy_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/dataAgents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{resource=projects/*/locations/*/dataAgents/*}:getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.DataAgentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
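# One detail worth noting in the IAM tests: get_iam_policy and set_iam_policy
# take raw protobuf messages (iam_policy_pb2.*), so `pb_request = request`
# serializes directly, whereas the proto-plus DataAgent requests first need
# the `request_type.pb(request)` unwrap used elsewhere in this diff. A
# minimal illustration with the raw message (the grpc-google-iam-v1
# distribution provides the module):
from google.iam.v1 import iam_policy_pb2
from google.protobuf import json_format

raw = iam_policy_pb2.GetIamPolicyRequest(resource="resource_value")
assert '"resource"' in json_format.MessageToJson(raw)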
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +def test_set_iam_policy_rest_flattened(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/dataAgents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{resource=projects/*/locations/*/dataAgents/*}:setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataAgentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataAgentServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataAgentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataAgentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataAgentServiceGrpcTransport, + transports.DataAgentServiceGrpcAsyncIOTransport, + transports.DataAgentServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DataAgentServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_agents_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_data_agents), "__call__") as call: + call.return_value = data_agent_service.ListDataAgentsResponse() + client.list_data_agents(request=None) + + # Establish that the underlying stub method was called. 
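+        # Passing request=None should make the client fall back to a
+        # default-constructed request message, checked below.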
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.ListDataAgentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_accessible_data_agents_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_accessible_data_agents), "__call__" + ) as call: + call.return_value = data_agent_service.ListAccessibleDataAgentsResponse() + client.list_accessible_data_agents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_agent_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. @@ -6478,6 +8225,29 @@ def test_create_data_agent_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_agent_sync_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent_sync), "__call__" + ) as call: + call.return_value = gcg_data_agent.DataAgent() + client.create_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_update_data_agent_empty_call_grpc(): @@ -6501,6 +8271,29 @@ def test_update_data_agent_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_agent_sync_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + call.return_value = gcg_data_agent.DataAgent() + client.update_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.UpdateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_delete_data_agent_empty_call_grpc(): @@ -6524,6 +8317,29 @@ def test_delete_data_agent_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_agent_sync_empty_call_grpc(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + call.return_value = None + client.delete_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.DeleteDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_iam_policy_empty_call_grpc(): @@ -6628,12 +8444,126 @@ async def test_list_accessible_data_agents_empty_call_grpc_asyncio(): unreachable=["unreachable_value"], ) ) - await client.list_accessible_data_agents(request=None) + await client.list_accessible_data_agents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_agent_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_data_agent), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + await client.get_data_agent(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.GetDataAgentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_agent_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_data_agent(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_data_agent_sync_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent_sync), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + await client.create_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_agent_empty_call_grpc_asyncio(): + client = DataAgentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_agent), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_data_agent(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.ListAccessibleDataAgentsRequest() + request_msg = data_agent_service.UpdateDataAgentRequest() assert args[0] == request_msg @@ -6641,28 +8571,30 @@ async def test_list_accessible_data_agents_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_data_agent_empty_call_grpc_asyncio(): +async def test_update_data_agent_sync_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_data_agent), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - data_agent.DataAgent( + gcg_data_agent.DataAgent( name="name_value", display_name="display_name_value", description="description_value", ) ) - await client.get_data_agent(request=None) + await client.update_data_agent_sync(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.GetDataAgentRequest() + request_msg = data_agent_service.UpdateDataAgentRequest() assert args[0] == request_msg @@ -6670,7 +8602,7 @@ async def test_get_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio -async def test_create_data_agent_empty_call_grpc_asyncio(): +async def test_delete_data_agent_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", @@ -6678,18 +8610,18 @@ async def test_create_data_agent_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_data_agent), "__call__" + type(client.transport.delete_data_agent), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.create_data_agent(request=None) + await client.delete_data_agent(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.CreateDataAgentRequest() + request_msg = data_agent_service.DeleteDataAgentRequest() assert args[0] == request_msg @@ -6697,7 +8629,7 @@ async def test_create_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_data_agent_empty_call_grpc_asyncio(): +async def test_delete_data_agent_sync_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", @@ -6705,18 +8637,16 @@ async def test_update_data_agent_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_data_agent), "__call__" + type(client.transport.delete_data_agent_sync), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_data_agent(request=None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_agent_sync(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.UpdateDataAgentRequest() + request_msg = data_agent_service.DeleteDataAgentRequest() assert args[0] == request_msg @@ -6724,26 +8654,27 @@ async def test_update_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_delete_data_agent_empty_call_grpc_asyncio(): +async def test_get_iam_policy_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_agent), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) - await client.delete_data_agent(request=None) + await client.get_iam_policy(request=None) # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = data_agent_service.DeleteDataAgentRequest() + request_msg = iam_policy_pb2.GetIamPolicyRequest() assert args[0] == request_msg @@ -6751,14 +8682,14 @@ async def test_delete_data_agent_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): +async def test_set_iam_policy_empty_call_grpc_asyncio(): client = DataAgentServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy_pb2.Policy( @@ -6766,59 +8697,303 @@ async def test_get_iam_policy_empty_call_grpc_asyncio(): etag=b"etag_blob", ) ) - await client.get_iam_policy(request=None) + await client.set_iam_policy(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DataAgentServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_data_agents_rest_bad_request( + request_type=data_agent_service.ListDataAgentsRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_agents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.ListDataAgentsRequest, + dict, + ], +) +def test_list_data_agents_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
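+        # Non-default field values let the assertions below distinguish a real
+        # round-trip from an accidentally empty message.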
+ return_value = data_agent_service.ListDataAgentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_data_agents(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataAgentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_agents_rest_interceptors(null_interceptor): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataAgentServiceRestInterceptor(), + ) + client = DataAgentServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "post_list_data_agents" + ) as post, mock.patch.object( + transports.DataAgentServiceRestInterceptor, + "post_list_data_agents_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "pre_list_data_agents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_agent_service.ListDataAgentsRequest.pb( + data_agent_service.ListDataAgentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_agent_service.ListDataAgentsResponse.to_json( + data_agent_service.ListDataAgentsResponse() + ) + req.return_value.content = return_value + + request = data_agent_service.ListDataAgentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_agent_service.ListDataAgentsResponse() + post_with_metadata.return_value = ( + data_agent_service.ListDataAgentsResponse(), + metadata, + ) + + client.list_data_agents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_accessible_data_agents_rest_bad_request( + request_type=data_agent_service.ListAccessibleDataAgentsRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
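+    # A mocked 400 status must surface to the caller as core_exceptions.BadRequest.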
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_accessible_data_agents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.ListAccessibleDataAgentsRequest, + dict, + ], +) +def test_list_accessible_data_agents_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_agent_service.ListAccessibleDataAgentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_accessible_data_agents(request) - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccessibleDataAgentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = DataAgentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_accessible_data_agents_rest_interceptors(null_interceptor): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataAgentServiceRestInterceptor(), ) + client = DataAgentServiceClient(transport=transport) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "post_list_accessible_data_agents" + ) as post, mock.patch.object( + transports.DataAgentServiceRestInterceptor, + "post_list_accessible_data_agents_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "pre_list_accessible_data_agents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_agent_service.ListAccessibleDataAgentsRequest.pb( + data_agent_service.ListAccessibleDataAgentsRequest() ) - await client.set_iam_policy(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_agent_service.ListAccessibleDataAgentsResponse.to_json( + data_agent_service.ListAccessibleDataAgentsResponse() + ) + req.return_value.content = return_value - assert args[0] == request_msg + request = data_agent_service.ListAccessibleDataAgentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_agent_service.ListAccessibleDataAgentsResponse() + post_with_metadata.return_value = ( + data_agent_service.ListAccessibleDataAgentsResponse(), + metadata, + ) + client.list_accessible_data_agents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = DataAgentServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_data_agents_rest_bad_request( - request_type=data_agent_service.ListDataAgentsRequest, +def test_get_data_agent_rest_bad_request( + request_type=data_agent_service.GetDataAgentRequest, ): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6833,31 +9008,32 @@ def test_list_data_agents_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_agents(request) + client.get_data_agent(request) @pytest.mark.parametrize( "request_type", [ - data_agent_service.ListDataAgentsRequest, + data_agent_service.GetDataAgentRequest, dict, ], ) -def test_list_data_agents_rest_call_success(request_type): +def test_get_data_agent_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListDataAgentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", ) # Wrap the value into a proper Response obj @@ -6865,21 +9041,22 @@ def test_list_data_agents_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = data_agent_service.ListDataAgentsResponse.pb(return_value) + return_value = data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_agents(request) + response = client.get_data_agent(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDataAgentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_agents_rest_interceptors(null_interceptor): +def test_get_data_agent_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6893,18 +9070,17 @@ def test_list_data_agents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_list_data_agents" + transports.DataAgentServiceRestInterceptor, "post_get_data_agent" ) as post, mock.patch.object( - transports.DataAgentServiceRestInterceptor, - "post_list_data_agents_with_metadata", + transports.DataAgentServiceRestInterceptor, "post_get_data_agent_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_list_data_agents" + transports.DataAgentServiceRestInterceptor, "pre_get_data_agent" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.ListDataAgentsRequest.pb( - data_agent_service.ListDataAgentsRequest() + pb_message = data_agent_service.GetDataAgentRequest.pb( + data_agent_service.GetDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -6916,107 +9092,325 @@ def test_list_data_agents_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_agent_service.ListDataAgentsResponse.to_json( - data_agent_service.ListDataAgentsResponse() - ) + return_value = data_agent.DataAgent.to_json(data_agent.DataAgent()) req.return_value.content = return_value - request = data_agent_service.ListDataAgentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_agent_service.ListDataAgentsResponse() - post_with_metadata.return_value = ( - data_agent_service.ListDataAgentsResponse(), - metadata, - ) + request = data_agent_service.GetDataAgentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_agent.DataAgent() + post_with_metadata.return_value = data_agent.DataAgent(), metadata + + client.get_data_agent( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_data_agent_rest_bad_request( + request_type=data_agent_service.CreateDataAgentRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
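+    # (create_data_agent normally returns a long-running operation; a 400 response
+    # should raise on the initial call, before any operation polling begins.)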
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_data_agent(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.CreateDataAgentRequest, + dict, + ], +) +def test_create_data_agent_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["data_agent"] = { + "data_analytics_agent": { + "staging_context": { + "system_instruction": "system_instruction_value", + "datasource_references": { + "bq": { + "table_references": [ + { + "project_id": "project_id_value", + "dataset_id": "dataset_id_value", + "table_id": "table_id_value", + "schema": { + "fields": [ + { + "name": "name_value", + "type_": "type__value", + "description": "description_value", + "mode": "mode_value", + "synonyms": [ + "synonyms_value1", + "synonyms_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "subfields": {}, + "category": "category_value", + "value_format": "value_format_value", + } + ], + "description": "description_value", + "synonyms": ["synonyms_value1", "synonyms_value2"], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "filters": [ + { + "field": "field_value", + "value": "value_value", + "type_": 1, + } + ], + }, + } + ] + }, + "studio": { + "studio_references": [{"datasource_id": "datasource_id_value"}] + }, + "looker": { + "explore_references": [ + { + "looker_instance_uri": "looker_instance_uri_value", + "private_looker_instance_info": { + "looker_instance_id": "looker_instance_id_value", + "service_directory_name": "service_directory_name_value", + }, + "lookml_model": "lookml_model_value", + "explore": "explore_value", + "schema": {}, + } + ], + "credentials": { + "oauth": { + "secret": { + "client_id": "client_id_value", + "client_secret": "client_secret_value", + }, + "token": {"access_token": "access_token_value"}, + } + }, + }, + "alloydb": { + "database_reference": { + "project_id": "project_id_value", + "region": "region_value", + "cluster_id": "cluster_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": { + "context_set_id": "context_set_id_value" + }, + }, + "spanner_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + "cloud_sql_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + }, + "options": { + "chart": {"image": {"no_image": {}, "svg": {}}}, + "analysis": 
{"python": {"enabled": True}}, + "datasource": {"big_query_max_billed_bytes": {"value": 541}}, + }, + "example_queries": [ + { + "sql_query": "sql_query_value", + "natural_language_question": "natural_language_question_value", + } + ], + "looker_golden_queries": [ + { + "natural_language_questions": [ + "natural_language_questions_value1", + "natural_language_questions_value2", + ], + "looker_query": { + "model": "model_value", + "explore": "explore_value", + "fields": ["fields_value1", "fields_value2"], + "filters": [ + {"field": "field_value", "value": "value_value"} + ], + "sorts": ["sorts_value1", "sorts_value2"], + "limit": "limit_value", + }, + } + ], + "glossary_terms": [ + { + "display_name": "display_name_value", + "description": "description_value", + "labels": ["labels_value1", "labels_value2"], + } + ], + "schema_relationships": [ + { + "left_schema_paths": { + "table_fqn": "table_fqn_value", + "paths": ["paths_value1", "paths_value2"], + }, + "right_schema_paths": {}, + "sources": [1], + "confidence_score": 0.1673, + } + ], + }, + "published_context": {}, + "last_published_context": {}, + }, + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "purge_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_agent_service.CreateDataAgentRequest.meta.fields["data_agent"] - client.list_data_agents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_list_accessible_data_agents_rest_bad_request( - request_type=data_agent_service.ListAccessibleDataAgentsRequest, -): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_accessible_data_agents(request) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_agent"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - data_agent_service.ListAccessibleDataAgentsRequest, - dict, - ], -) -def test_list_accessible_data_agents_rest_call_success(request_type): - client = DataAgentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_agent"][field])): + del request_init["data_agent"][field][i][subfield] + else: + del request_init["data_agent"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = data_agent_service.ListAccessibleDataAgentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_agent_service.ListAccessibleDataAgentsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_accessible_data_agents(request) + response = client.create_data_agent(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAccessibleDataAgentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_accessible_data_agents_rest_interceptors(null_interceptor): +def test_create_data_agent_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7030,18 +9424,20 @@ def test_list_accessible_data_agents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_list_accessible_data_agents" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DataAgentServiceRestInterceptor, "post_create_data_agent" ) as post, mock.patch.object( transports.DataAgentServiceRestInterceptor, - "post_list_accessible_data_agents_with_metadata", + "post_create_data_agent_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_list_accessible_data_agents" + transports.DataAgentServiceRestInterceptor, "pre_create_data_agent" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.ListAccessibleDataAgentsRequest.pb( - data_agent_service.ListAccessibleDataAgentsRequest() + pb_message = data_agent_service.CreateDataAgentRequest.pb( + data_agent_service.CreateDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -7053,24 +9449,19 @@ def test_list_accessible_data_agents_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_agent_service.ListAccessibleDataAgentsResponse.to_json( - data_agent_service.ListAccessibleDataAgentsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = data_agent_service.ListAccessibleDataAgentsRequest() + request = data_agent_service.CreateDataAgentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = data_agent_service.ListAccessibleDataAgentsResponse() - post_with_metadata.return_value = ( - data_agent_service.ListAccessibleDataAgentsResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_accessible_data_agents( + client.create_data_agent( request, metadata=[ ("key", "val"), @@ -7083,14 +9474,14 @@ def test_list_accessible_data_agents_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_data_agent_rest_bad_request( - request_type=data_agent_service.GetDataAgentRequest, +def test_create_data_agent_sync_rest_bad_request( + request_type=data_agent_service.CreateDataAgentRequest, ): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call 
within the method and fake a BadRequest error. @@ -7105,29 +9496,262 @@ def test_get_data_agent_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_agent(request) + client.create_data_agent_sync(request) @pytest.mark.parametrize( "request_type", [ - data_agent_service.GetDataAgentRequest, + data_agent_service.CreateDataAgentRequest, dict, ], ) -def test_get_data_agent_rest_call_success(request_type): +def test_create_data_agent_sync_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["data_agent"] = { + "data_analytics_agent": { + "staging_context": { + "system_instruction": "system_instruction_value", + "datasource_references": { + "bq": { + "table_references": [ + { + "project_id": "project_id_value", + "dataset_id": "dataset_id_value", + "table_id": "table_id_value", + "schema": { + "fields": [ + { + "name": "name_value", + "type_": "type__value", + "description": "description_value", + "mode": "mode_value", + "synonyms": [ + "synonyms_value1", + "synonyms_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "subfields": {}, + "category": "category_value", + "value_format": "value_format_value", + } + ], + "description": "description_value", + "synonyms": ["synonyms_value1", "synonyms_value2"], + "tags": ["tags_value1", "tags_value2"], + "display_name": "display_name_value", + "filters": [ + { + "field": "field_value", + "value": "value_value", + "type_": 1, + } + ], + }, + } + ] + }, + "studio": { + "studio_references": [{"datasource_id": "datasource_id_value"}] + }, + "looker": { + "explore_references": [ + { + "looker_instance_uri": "looker_instance_uri_value", + "private_looker_instance_info": { + "looker_instance_id": "looker_instance_id_value", + "service_directory_name": "service_directory_name_value", + }, + "lookml_model": "lookml_model_value", + "explore": "explore_value", + "schema": {}, + } + ], + "credentials": { + "oauth": { + "secret": { + "client_id": "client_id_value", + "client_secret": "client_secret_value", + }, + "token": {"access_token": "access_token_value"}, + } + }, + }, + "alloydb": { + "database_reference": { + "project_id": "project_id_value", + "region": "region_value", + "cluster_id": "cluster_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": { + "context_set_id": "context_set_id_value" + }, + }, + "spanner_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + "cloud_sql_reference": { + "database_reference": { + "engine": 1, + "project_id": "project_id_value", + "region": "region_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + "table_ids": ["table_ids_value1", "table_ids_value2"], + }, + "agent_context_reference": {}, + }, + }, + "options": { + "chart": {"image": {"no_image": {}, "svg": {}}}, + 
"analysis": {"python": {"enabled": True}}, + "datasource": {"big_query_max_billed_bytes": {"value": 541}}, + }, + "example_queries": [ + { + "sql_query": "sql_query_value", + "natural_language_question": "natural_language_question_value", + } + ], + "looker_golden_queries": [ + { + "natural_language_questions": [ + "natural_language_questions_value1", + "natural_language_questions_value2", + ], + "looker_query": { + "model": "model_value", + "explore": "explore_value", + "fields": ["fields_value1", "fields_value2"], + "filters": [ + {"field": "field_value", "value": "value_value"} + ], + "sorts": ["sorts_value1", "sorts_value2"], + "limit": "limit_value", + }, + } + ], + "glossary_terms": [ + { + "display_name": "display_name_value", + "description": "description_value", + "labels": ["labels_value1", "labels_value2"], + } + ], + "schema_relationships": [ + { + "left_schema_paths": { + "table_fqn": "table_fqn_value", + "paths": ["paths_value1", "paths_value2"], + }, + "right_schema_paths": {}, + "sources": [1], + "confidence_score": 0.1673, + } + ], + }, + "published_context": {}, + "last_published_context": {}, + }, + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "purge_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_agent_service.CreateDataAgentRequest.meta.fields["data_agent"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_agent"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_agent"][field])): + del request_init["data_agent"][field][i][subfield] + else: + del request_init["data_agent"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = data_agent.DataAgent( + return_value = gcg_data_agent.DataAgent( name="name_value", display_name="display_name_value", description="description_value", @@ -7138,22 +9762,22 @@ def test_get_data_agent_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = data_agent.DataAgent.pb(return_value) + return_value = gcg_data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_agent(request) + response = client.create_data_agent_sync(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, data_agent.DataAgent) + assert isinstance(response, gcg_data_agent.DataAgent) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_agent_rest_interceptors(null_interceptor): +def test_create_data_agent_sync_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7167,17 +9791,18 @@ def test_get_data_agent_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_get_data_agent" + transports.DataAgentServiceRestInterceptor, "post_create_data_agent_sync" ) as post, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_get_data_agent_with_metadata" + transports.DataAgentServiceRestInterceptor, + "post_create_data_agent_sync_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_get_data_agent" + transports.DataAgentServiceRestInterceptor, "pre_create_data_agent_sync" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.GetDataAgentRequest.pb( - data_agent_service.GetDataAgentRequest() + pb_message = data_agent_service.CreateDataAgentRequest.pb( + data_agent_service.CreateDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -7189,19 +9814,19 @@ def test_get_data_agent_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_agent.DataAgent.to_json(data_agent.DataAgent()) + return_value = gcg_data_agent.DataAgent.to_json(gcg_data_agent.DataAgent()) req.return_value.content = return_value - request = data_agent_service.GetDataAgentRequest() + request = data_agent_service.CreateDataAgentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = data_agent.DataAgent() - post_with_metadata.return_value = data_agent.DataAgent(), metadata + post.return_value = gcg_data_agent.DataAgent() + post_with_metadata.return_value = gcg_data_agent.DataAgent(), metadata - client.get_data_agent( + client.create_data_agent_sync( request, metadata=[ ("key", "val"), @@ -7214,14 +9839,16 @@ def test_get_data_agent_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_data_agent_rest_bad_request( - request_type=data_agent_service.CreateDataAgentRequest, +def test_update_data_agent_rest_bad_request( + request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "data_agent": {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
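The interceptor tests above exercise the ``pre_*``/``post_*``/``post_*_with_metadata`` hooks that the REST transport invokes around each RPC. For orientation, a minimal sketch of a user-supplied interceptor, assuming the ``geminidataanalytics_v1beta`` package this test file targets; the class and hook names are taken from the mocks above, while the logging bodies are purely illustrative:

from google.auth import credentials as ga_credentials
from google.cloud.geminidataanalytics_v1beta.services.data_agent_service import (
    DataAgentServiceClient,
    transports,
)


class LoggingInterceptor(transports.DataAgentServiceRestInterceptor):
    def pre_create_data_agent_sync(self, request, metadata):
        # Runs before transcoding; may rewrite the request or the metadata.
        print("creating data agent under", request.parent)
        return request, metadata

    def post_create_data_agent_sync(self, response):
        # Runs on the deserialized DataAgent; may replace it.
        print("created", response.name)
        return response


transport = transports.DataAgentServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = DataAgentServiceClient(transport=transport)

Returning ``request, metadata`` from the ``pre_`` hook and a response object from the ``post_`` hooks is required, since the transport uses the returned values; that contract is exactly what the ``pre.return_value``/``post.return_value`` mocks above stand in for.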
@@ -7236,23 +9863,25 @@ def test_create_data_agent_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_data_agent(request) + client.update_data_agent(request) @pytest.mark.parametrize( "request_type", [ - data_agent_service.CreateDataAgentRequest, + data_agent_service.UpdateDataAgentRequest, dict, ], ) -def test_create_data_agent_rest_call_success(request_type): +def test_update_data_agent_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "data_agent": {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + } request_init["data_agent"] = { "data_analytics_agent": { "staging_context": { @@ -7410,7 +10039,7 @@ def test_create_data_agent_rest_call_success(request_type): "published_context": {}, "last_published_context": {}, }, - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataAgents/sample3", "display_name": "display_name_value", "description": "description_value", "labels": {}, @@ -7424,7 +10053,7 @@ def test_create_data_agent_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = data_agent_service.CreateDataAgentRequest.meta.fields["data_agent"] + test_field = data_agent_service.UpdateDataAgentRequest.meta.fields["data_agent"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -7500,14 +10129,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_data_agent(request) + response = client.update_data_agent(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_data_agent_rest_interceptors(null_interceptor): +def test_update_data_agent_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7523,18 +10152,18 @@ def test_create_data_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_create_data_agent" + transports.DataAgentServiceRestInterceptor, "post_update_data_agent" ) as post, mock.patch.object( transports.DataAgentServiceRestInterceptor, - "post_create_data_agent_with_metadata", + "post_update_data_agent_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_create_data_agent" + transports.DataAgentServiceRestInterceptor, "pre_update_data_agent" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = data_agent_service.CreateDataAgentRequest.pb( - data_agent_service.CreateDataAgentRequest() + pb_message = data_agent_service.UpdateDataAgentRequest.pb( + data_agent_service.UpdateDataAgentRequest() ) transcode.return_value = { "method": "post", @@ -7549,7 +10178,7 @@ def test_create_data_agent_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = data_agent_service.CreateDataAgentRequest() + request = data_agent_service.UpdateDataAgentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -7558,7 +10187,7 @@ def test_create_data_agent_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_data_agent( + client.update_data_agent( request, metadata=[ ("key", "val"), @@ -7571,7 +10200,7 @@ def test_create_data_agent_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_data_agent_rest_bad_request( +def test_update_data_agent_sync_rest_bad_request( request_type=data_agent_service.UpdateDataAgentRequest, ): client = DataAgentServiceClient( @@ -7595,7 +10224,7 @@ def test_update_data_agent_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_agent(request) + client.update_data_agent_sync(request) @pytest.mark.parametrize( @@ -7605,7 +10234,7 @@ def test_update_data_agent_rest_bad_request( dict, ], ) -def test_update_data_agent_rest_call_success(request_type): +def test_update_data_agent_sync_rest_call_success(request_type): client = DataAgentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7852,23 +10481,33 @@ def get_message_fields(field): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
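        # (Unlike update_data_agent, the *_sync variant returns the DataAgent
        # resource directly instead of a long-running Operation, so both the
        # mocked return value and the assertions change below.)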
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = gcg_data_agent.DataAgent( + name="name_value", + display_name="display_name_value", + description="description_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcg_data_agent.DataAgent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_agent(request) + response = client.update_data_agent_sync(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, gcg_data_agent.DataAgent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_agent_rest_interceptors(null_interceptor): +def test_update_data_agent_sync_rest_interceptors(null_interceptor): transport = transports.DataAgentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7882,14 +10521,12 @@ def test_update_data_agent_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DataAgentServiceRestInterceptor, "post_update_data_agent" + transports.DataAgentServiceRestInterceptor, "post_update_data_agent_sync" ) as post, mock.patch.object( transports.DataAgentServiceRestInterceptor, - "post_update_data_agent_with_metadata", + "post_update_data_agent_sync_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DataAgentServiceRestInterceptor, "pre_update_data_agent" + transports.DataAgentServiceRestInterceptor, "pre_update_data_agent_sync" ) as pre: pre.assert_not_called() post.assert_not_called() @@ -7907,7 +10544,7 @@ def test_update_data_agent_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = gcg_data_agent.DataAgent.to_json(gcg_data_agent.DataAgent()) req.return_value.content = return_value request = data_agent_service.UpdateDataAgentRequest() @@ -7916,10 +10553,10 @@ def test_update_data_agent_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = gcg_data_agent.DataAgent() + post_with_metadata.return_value = gcg_data_agent.DataAgent(), metadata - client.update_data_agent( + client.update_data_agent_sync( request, metadata=[ ("key", "val"), @@ -8056,6 +10693,115 @@ def test_delete_data_agent_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_delete_data_agent_sync_rest_bad_request( + request_type=data_agent_service.DeleteDataAgentRequest, +): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy 
transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_data_agent_sync(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_agent_service.DeleteDataAgentRequest, + dict, + ], +) +def test_delete_data_agent_sync_rest_call_success(request_type): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataAgents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_data_agent_sync(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_agent_sync_rest_interceptors(null_interceptor): + transport = transports.DataAgentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataAgentServiceRestInterceptor(), + ) + client = DataAgentServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataAgentServiceRestInterceptor, "pre_delete_data_agent_sync" + ) as pre: + pre.assert_not_called() + pb_message = data_agent_service.DeleteDataAgentRequest.pb( + data_agent_service.DeleteDataAgentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = data_agent_service.DeleteDataAgentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_data_agent_sync( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + def test_get_iam_policy_rest_bad_request( request_type=iam_policy_pb2.GetIamPolicyRequest, ): @@ -8763,6 +11509,28 @@ def test_create_data_agent_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
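+# (With request=None the client builds a default CreateDataAgentRequest, which
+# is why the stub is asserted to receive the empty request message.)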
+def test_create_data_agent_sync_empty_call_rest(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_agent_sync), "__call__" + ) as call: + client.create_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.CreateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_update_data_agent_empty_call_rest(): @@ -8785,6 +11553,28 @@ def test_update_data_agent_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_agent_sync_empty_call_rest(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_agent_sync), "__call__" + ) as call: + client.update_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.UpdateDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_data_agent_empty_call_rest(): @@ -8807,6 +11597,28 @@ def test_delete_data_agent_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_agent_sync_empty_call_rest(): + client = DataAgentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_agent_sync), "__call__" + ) as call: + client.delete_data_agent_sync(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_agent_service.DeleteDataAgentRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_get_iam_policy_empty_call_rest(): @@ -8901,8 +11713,11 @@ def test_data_agent_service_base_transport(): "list_accessible_data_agents", "get_data_agent", "create_data_agent", + "create_data_agent_sync", "update_data_agent", + "update_data_agent_sync", "delete_data_agent", + "delete_data_agent_sync", "get_iam_policy", "set_iam_policy", "get_location", @@ -9186,12 +12001,21 @@ def test_data_agent_service_client_transport_session_collision(transport_name): session1 = client1.transport.create_data_agent._session session2 = client2.transport.create_data_agent._session assert session1 != session2 + session1 = client1.transport.create_data_agent_sync._session + session2 = client2.transport.create_data_agent_sync._session + assert session1 != session2 session1 = client1.transport.update_data_agent._session session2 = client2.transport.update_data_agent._session assert session1 != session2 + session1 = client1.transport.update_data_agent_sync._session + session2 = client2.transport.update_data_agent_sync._session + assert session1 != session2 session1 = client1.transport.delete_data_agent._session session2 = client2.transport.delete_data_agent._session assert session1 != session2 + session1 = client1.transport.delete_data_agent_sync._session + session2 = client2.transport.delete_data_agent_sync._session + assert session1 != session2 session1 = client1.transport.get_iam_policy._session session2 = client2.transport.get_iam_policy._session assert session1 != session2 diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py index de7863d8d1f9..74d2c150471b 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py @@ -99,6 +99,12 @@ class AccessReason(proto.Enum): REASON_NOT_EXPECTED (7): No reason is expected for this key request. MODIFIED_CUSTOMER_INITIATED_ACCESS (8): + Deprecated: This code is no longer generated by Google + Cloud. The GOOGLE_RESPONSE_TO_PRODUCTION_ALERT justification + codes available in both Key Access Justifications and Access + Transparency logs provide customer-visible signals of + emergency access in more precise contexts. + Customer uses their account to perform any access to their own data which their IAM policy authorizes, and one of the following is true: @@ -110,6 +116,12 @@ class AccessReason(proto.Enum): interacted with a resource in the same project or folder as the currently accessed resource within the past 7 days. MODIFIED_GOOGLE_INITIATED_SYSTEM_OPERATION (9): + Deprecated: This code is no longer generated by Google + Cloud. The GOOGLE_RESPONSE_TO_PRODUCTION_ALERT justification + codes available in both Key Access Justifications and Access + Transparency logs provide customer-visible signals of + emergency access in more precise contexts. 
+ Google systems access customer data to help optimize the structure of the data or quality for future uses by the customer, and one of the following is true: diff --git a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py index 1db4ef3fb6a1..20d11c12667d 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py @@ -63,10 +63,21 @@ FlexPerformance, HybridReplicationSchedule, LocationMetadata, + OsType, QosType, ServiceLevel, + StoragePoolType, UserCommands, ) +from google.cloud.netapp_v1.types.host_group import ( + CreateHostGroupRequest, + DeleteHostGroupRequest, + GetHostGroupRequest, + HostGroup, + ListHostGroupsRequest, + ListHostGroupsResponse, + UpdateHostGroupRequest, +) from google.cloud.netapp_v1.types.kms import ( CreateKmsConfigRequest, DeleteKmsConfigRequest, @@ -128,6 +139,10 @@ from google.cloud.netapp_v1.types.volume import ( AccessType, BackupConfig, + BlockDevice, + CacheConfig, + CacheParameters, + CachePrePopulate, CreateVolumeRequest, DailySchedule, DeleteVolumeRequest, @@ -140,6 +155,8 @@ MonthlySchedule, MountOption, Protocols, + RestoreBackupFilesRequest, + RestoreBackupFilesResponse, RestoreParameters, RestrictedAction, RevertVolumeRequest, @@ -191,8 +208,17 @@ "EncryptionType", "FlexPerformance", "HybridReplicationSchedule", + "OsType", "QosType", "ServiceLevel", + "StoragePoolType", + "CreateHostGroupRequest", + "DeleteHostGroupRequest", + "GetHostGroupRequest", + "HostGroup", + "ListHostGroupsRequest", + "ListHostGroupsResponse", + "UpdateHostGroupRequest", "CreateKmsConfigRequest", "DeleteKmsConfigRequest", "EncryptVolumesRequest", @@ -242,6 +268,10 @@ "UpdateStoragePoolRequest", "ValidateDirectoryServiceRequest", "BackupConfig", + "BlockDevice", + "CacheConfig", + "CacheParameters", + "CachePrePopulate", "CreateVolumeRequest", "DailySchedule", "DeleteVolumeRequest", @@ -253,6 +283,8 @@ "ListVolumesResponse", "MonthlySchedule", "MountOption", + "RestoreBackupFilesRequest", + "RestoreBackupFilesResponse", "RestoreParameters", "RevertVolumeRequest", "SimpleExportPolicyRule", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py index f4beaad0d62d..c8538bb711c0 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py @@ -72,10 +72,21 @@ FlexPerformance, HybridReplicationSchedule, LocationMetadata, + OsType, QosType, ServiceLevel, + StoragePoolType, UserCommands, ) +from .types.host_group import ( + CreateHostGroupRequest, + DeleteHostGroupRequest, + GetHostGroupRequest, + HostGroup, + ListHostGroupsRequest, + ListHostGroupsResponse, + UpdateHostGroupRequest, +) from .types.kms import ( CreateKmsConfigRequest, DeleteKmsConfigRequest, @@ -137,6 +148,10 @@ from .types.volume import ( AccessType, BackupConfig, + BlockDevice, + CacheConfig, + CacheParameters, + CachePrePopulate, CreateVolumeRequest, DailySchedule, DeleteVolumeRequest, @@ -149,6 +164,8 @@ MonthlySchedule, MountOption, Protocols, + RestoreBackupFilesRequest, + RestoreBackupFilesResponse, RestoreParameters, RestrictedAction, RevertVolumeRequest, @@ -264,10 +281,15 @@ def _get_version(dependency_name): "BackupConfig", "BackupPolicy", "BackupVault", + "BlockDevice", + "CacheConfig", + "CacheParameters", + "CachePrePopulate", "CreateActiveDirectoryRequest", 
"CreateBackupPolicyRequest", "CreateBackupRequest", "CreateBackupVaultRequest", + "CreateHostGroupRequest", "CreateKmsConfigRequest", "CreateQuotaRuleRequest", "CreateReplicationRequest", @@ -279,6 +301,7 @@ def _get_version(dependency_name): "DeleteBackupPolicyRequest", "DeleteBackupRequest", "DeleteBackupVaultRequest", + "DeleteHostGroupRequest", "DeleteKmsConfigRequest", "DeleteQuotaRuleRequest", "DeleteReplicationRequest", @@ -296,12 +319,14 @@ def _get_version(dependency_name): "GetBackupPolicyRequest", "GetBackupRequest", "GetBackupVaultRequest", + "GetHostGroupRequest", "GetKmsConfigRequest", "GetQuotaRuleRequest", "GetReplicationRequest", "GetSnapshotRequest", "GetStoragePoolRequest", "GetVolumeRequest", + "HostGroup", "HourlySchedule", "HybridPeeringDetails", "HybridReplicationParameters", @@ -315,6 +340,8 @@ def _get_version(dependency_name): "ListBackupVaultsResponse", "ListBackupsRequest", "ListBackupsResponse", + "ListHostGroupsRequest", + "ListHostGroupsResponse", "ListKmsConfigsRequest", "ListKmsConfigsResponse", "ListQuotaRulesRequest", @@ -332,10 +359,13 @@ def _get_version(dependency_name): "MountOption", "NetAppClient", "OperationMetadata", + "OsType", "Protocols", "QosType", "QuotaRule", "Replication", + "RestoreBackupFilesRequest", + "RestoreBackupFilesResponse", "RestoreParameters", "RestrictedAction", "ResumeReplicationRequest", @@ -349,6 +379,7 @@ def _get_version(dependency_name): "SnapshotPolicy", "StopReplicationRequest", "StoragePool", + "StoragePoolType", "SwitchActiveReplicaZoneRequest", "SyncReplicationRequest", "TieringPolicy", @@ -357,6 +388,7 @@ def _get_version(dependency_name): "UpdateBackupPolicyRequest", "UpdateBackupRequest", "UpdateBackupVaultRequest", + "UpdateHostGroupRequest", "UpdateKmsConfigRequest", "UpdateQuotaRuleRequest", "UpdateReplicationRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json index c06767c5fa9a..6a72b1befa76 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json @@ -30,6 +30,11 @@ "create_backup_vault" ] }, + "CreateHostGroup": { + "methods": [ + "create_host_group" + ] + }, "CreateKmsConfig": { "methods": [ "create_kms_config" @@ -80,6 +85,11 @@ "delete_backup_vault" ] }, + "DeleteHostGroup": { + "methods": [ + "delete_host_group" + ] + }, "DeleteKmsConfig": { "methods": [ "delete_kms_config" @@ -140,6 +150,11 @@ "get_backup_vault" ] }, + "GetHostGroup": { + "methods": [ + "get_host_group" + ] + }, "GetKmsConfig": { "methods": [ "get_kms_config" @@ -190,6 +205,11 @@ "list_backups" ] }, + "ListHostGroups": { + "methods": [ + "list_host_groups" + ] + }, "ListKmsConfigs": { "methods": [ "list_kms_configs" @@ -220,6 +240,11 @@ "list_volumes" ] }, + "RestoreBackupFiles": { + "methods": [ + "restore_backup_files" + ] + }, "ResumeReplication": { "methods": [ "resume_replication" @@ -270,6 +295,11 @@ "update_backup_vault" ] }, + "UpdateHostGroup": { + "methods": [ + "update_host_group" + ] + }, "UpdateKmsConfig": { "methods": [ "update_kms_config" @@ -335,6 +365,11 @@ "create_backup_vault" ] }, + "CreateHostGroup": { + "methods": [ + "create_host_group" + ] + }, "CreateKmsConfig": { "methods": [ "create_kms_config" @@ -385,6 +420,11 @@ "delete_backup_vault" ] }, + "DeleteHostGroup": { + "methods": [ + "delete_host_group" + ] + }, "DeleteKmsConfig": { "methods": [ "delete_kms_config" @@ -445,6 +485,11 @@ 
"get_backup_vault" ] }, + "GetHostGroup": { + "methods": [ + "get_host_group" + ] + }, "GetKmsConfig": { "methods": [ "get_kms_config" @@ -495,6 +540,11 @@ "list_backups" ] }, + "ListHostGroups": { + "methods": [ + "list_host_groups" + ] + }, "ListKmsConfigs": { "methods": [ "list_kms_configs" @@ -525,6 +575,11 @@ "list_volumes" ] }, + "RestoreBackupFiles": { + "methods": [ + "restore_backup_files" + ] + }, "ResumeReplication": { "methods": [ "resume_replication" @@ -575,6 +630,11 @@ "update_backup_vault" ] }, + "UpdateHostGroup": { + "methods": [ + "update_host_group" + ] + }, "UpdateKmsConfig": { "methods": [ "update_kms_config" @@ -640,6 +700,11 @@ "create_backup_vault" ] }, + "CreateHostGroup": { + "methods": [ + "create_host_group" + ] + }, "CreateKmsConfig": { "methods": [ "create_kms_config" @@ -690,6 +755,11 @@ "delete_backup_vault" ] }, + "DeleteHostGroup": { + "methods": [ + "delete_host_group" + ] + }, "DeleteKmsConfig": { "methods": [ "delete_kms_config" @@ -750,6 +820,11 @@ "get_backup_vault" ] }, + "GetHostGroup": { + "methods": [ + "get_host_group" + ] + }, "GetKmsConfig": { "methods": [ "get_kms_config" @@ -800,6 +875,11 @@ "list_backups" ] }, + "ListHostGroups": { + "methods": [ + "list_host_groups" + ] + }, "ListKmsConfigs": { "methods": [ "list_kms_configs" @@ -830,6 +910,11 @@ "list_volumes" ] }, + "RestoreBackupFiles": { + "methods": [ + "restore_backup_files" + ] + }, "ResumeReplication": { "methods": [ "resume_replication" @@ -880,6 +965,11 @@ "update_backup_vault" ] }, + "UpdateHostGroup": { + "methods": [ + "update_host_group" + ] + }, "UpdateKmsConfig": { "methods": [ "update_kms_config" diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py index 6dba7ce5a69b..bfa2632a22e8 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py @@ -61,7 +61,10 @@ from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy from google.cloud.netapp_v1.types import backup_vault from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault -from google.cloud.netapp_v1.types import cloud_netapp_service, common, kms +from google.cloud.netapp_v1.types import cloud_netapp_service, common +from google.cloud.netapp_v1.types import host_group +from google.cloud.netapp_v1.types import host_group as gcn_host_group +from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import quota_rule from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule from google.cloud.netapp_v1.types import replication @@ -107,6 +110,8 @@ class NetAppAsyncClient: parse_backup_policy_path = staticmethod(NetAppClient.parse_backup_policy_path) backup_vault_path = staticmethod(NetAppClient.backup_vault_path) parse_backup_vault_path = staticmethod(NetAppClient.parse_backup_vault_path) + host_group_path = staticmethod(NetAppClient.host_group_path) + parse_host_group_path = staticmethod(NetAppClient.parse_host_group_path) kms_config_path = staticmethod(NetAppClient.kms_config_path) parse_kms_config_path = staticmethod(NetAppClient.parse_kms_config_path) network_path = staticmethod(NetAppClient.network_path) @@ -1488,7 +1493,7 @@ async def sample_create_volume(): volume.share_name = "share_name_value" volume.storage_pool = "storage_pool_value" volume.capacity_gib = 1247 - volume.protocols = ['SMB'] + 
volume.protocols = ['ISCSI'] request = netapp_v1.CreateVolumeRequest( parent="parent_value", @@ -1640,7 +1645,7 @@ async def sample_update_volume(): volume.share_name = "share_name_value" volume.storage_pool = "storage_pool_value" volume.capacity_gib = 1247 - volume.protocols = ['SMB'] + volume.protocols = ['ISCSI'] request = netapp_v1.UpdateVolumeRequest( volume=volume, @@ -3549,7 +3554,7 @@ async def sample_create_kms_config(): The result type for the operation will be :class:`google.cloud.netapp_v1.types.KmsConfig` - KmsConfig is the customer managed encryption key(CMEK) + KmsConfig is the customer-managed encryption key(CMEK) configuration. """ @@ -3671,7 +3676,7 @@ async def sample_get_kms_config(): Returns: google.cloud.netapp_v1.types.KmsConfig: - KmsConfig is the customer managed + KmsConfig is the customer-managed encryption key(CMEK) configuration. """ @@ -3803,7 +3808,7 @@ async def sample_update_kms_config(): The result type for the operation will be :class:`google.cloud.netapp_v1.types.KmsConfig` - KmsConfig is the customer managed encryption key(CMEK) + KmsConfig is the customer-managed encryption key(CMEK) configuration. """ @@ -3927,7 +3932,7 @@ async def sample_encrypt_volumes(): The result type for the operation will be :class:`google.cloud.netapp_v1.types.KmsConfig` - KmsConfig is the customer managed encryption key(CMEK) + KmsConfig is the customer-managed encryption key(CMEK) configuration. """ @@ -8048,6 +8053,783 @@ async def sample_delete_quota_rule(): # Done; return the response. return response + async def restore_backup_files( + self, + request: Optional[Union[volume.RestoreBackupFilesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Restore files from a backup to a volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_restore_backup_files(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.RestoreBackupFilesRequest( + name="name_value", + backup="backup_value", + file_list=['file_list_value1', 'file_list_value2'], + ) + + # Make the request + operation = client.restore_backup_files(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.RestoreBackupFilesRequest, dict]]): + The request object. RestoreBackupFilesRequest restores + files from a backup to a volume. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.RestoreBackupFilesResponse` + RestoreBackupFilesResponse is the result of + RestoreBackupFilesRequest. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, volume.RestoreBackupFilesRequest): + request = volume.RestoreBackupFilesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.restore_backup_files + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + volume.RestoreBackupFilesResponse, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_host_groups( + self, + request: Optional[Union[host_group.ListHostGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListHostGroupsAsyncPager: + r"""Returns a list of host groups in a ``location``. Use ``-`` as + location to list host groups across all locations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_list_host_groups(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListHostGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_host_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.ListHostGroupsRequest, dict]]): + The request object. ListHostGroupsRequest for listing + host groups. + parent (:class:`str`): + Required. Parent value for + ListHostGroupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListHostGroupsAsyncPager: + ListHostGroupsResponse is the + response to a ListHostGroupsRequest. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, host_group.ListHostGroupsRequest): + request = host_group.ListHostGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_host_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListHostGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_host_group( + self, + request: Optional[Union[host_group.GetHostGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> host_group.HostGroup: + r"""Returns details of the specified host group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_get_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetHostGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_host_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.GetHostGroupRequest, dict]]): + The request object. 
GetHostGroupRequest for getting a + host group. + name (:class:`str`): + Required. The resource name of the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.netapp_v1.types.HostGroup: + Host group is a collection of hosts + that can be used for accessing a Block + Volume. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, host_group.GetHostGroupRequest): + request = host_group.GetHostGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_host_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_host_group( + self, + request: Optional[Union[gcn_host_group.CreateHostGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + host_group: Optional[gcn_host_group.HostGroup] = None, + host_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new host group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_create_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ['hosts_value1', 'hosts_value2'] + host_group.os_type = "ESXI" + + request = netapp_v1.CreateHostGroupRequest( + parent="parent_value", + host_group=host_group, + host_group_id="host_group_id_value", + ) + + # Make the request + operation = client.create_host_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.CreateHostGroupRequest, dict]]): + The request object. CreateHostGroupRequest for creating a + host group. + parent (:class:`str`): + Required. Parent value for + CreateHostGroupRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + host_group (:class:`google.cloud.netapp_v1.types.HostGroup`): + Required. Fields of the host group to + create. + + This corresponds to the ``host_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + host_group_id (:class:`str`): + Required. ID of the host group to + create. Must be unique within the parent + resource. Must contain only letters, + numbers, and hyphen, with the first + character a letter or underscore, the + last a letter or underscore or a number, + and a 63 character maximum. + + This corresponds to the ``host_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.netapp_v1.types.HostGroup` Host group is a collection of hosts that can be used for accessing a Block + Volume. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, host_group, host_group_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_host_group.CreateHostGroupRequest): + request = gcn_host_group.CreateHostGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if host_group is not None: + request.host_group = host_group + if host_group_id is not None: + request.host_group_id = host_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_host_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_host_group.HostGroup, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_host_group( + self, + request: Optional[Union[gcn_host_group.UpdateHostGroupRequest, dict]] = None, + *, + host_group: Optional[gcn_host_group.HostGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing host group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_update_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ['hosts_value1', 'hosts_value2'] + host_group.os_type = "ESXI" + + request = netapp_v1.UpdateHostGroupRequest( + host_group=host_group, + ) + + # Make the request + operation = client.update_host_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.UpdateHostGroupRequest, dict]]): + The request object. UpdateHostGroupRequest for updating a + host group. + host_group (:class:`google.cloud.netapp_v1.types.HostGroup`): + Required. The host group to update. The host group's + ``name`` field is used to identify the host group. + Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + + This corresponds to the ``host_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.netapp_v1.types.HostGroup` Host group is a collection of hosts that can be used for accessing a Block + Volume. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [host_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_host_group.UpdateHostGroupRequest): + request = gcn_host_group.UpdateHostGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if host_group is not None: + request.host_group = host_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_host_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("host_group.name", request.host_group.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_host_group.HostGroup, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_host_group( + self, + request: Optional[Union[host_group.DeleteHostGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a host group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_delete_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteHostGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_host_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.DeleteHostGroupRequest, dict]]): + The request object. DeleteHostGroupRequest for deleting a + single host group. + name (:class:`str`): + Required. The resource name of the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, host_group.DeleteHostGroupRequest): + request = host_group.DeleteHostGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_host_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
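+        # ``rpc`` resolves to the wrapped transport method, so the await below
+        # yields a raw Operation proto that still needs wrapping into an
+        # operation future (see from_gapic below).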
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=cloud_netapp_service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
     async def list_operations(
         self,
         request: Optional[operations_pb2.ListOperationsRequest] = None,
diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py
index 2dcc28277ce1..e842865d9cec 100644
--- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py
+++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py
@@ -78,7 +78,10 @@
 from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy
 from google.cloud.netapp_v1.types import backup_vault
 from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault
-from google.cloud.netapp_v1.types import cloud_netapp_service, common, kms
+from google.cloud.netapp_v1.types import cloud_netapp_service, common
+from google.cloud.netapp_v1.types import host_group
+from google.cloud.netapp_v1.types import host_group as gcn_host_group
+from google.cloud.netapp_v1.types import kms
 from google.cloud.netapp_v1.types import quota_rule
 from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule
 from google.cloud.netapp_v1.types import replication
@@ -338,6 +341,28 @@ def parse_backup_vault_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def host_group_path(
+        project: str,
+        location: str,
+        host_group: str,
+    ) -> str:
+        """Returns a fully-qualified host_group string."""
+        return "projects/{project}/locations/{location}/hostGroups/{host_group}".format(
+            project=project,
+            location=location,
+            host_group=host_group,
+        )
+
+    @staticmethod
+    def parse_host_group_path(path: str) -> Dict[str, str]:
+        """Parses a host_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/hostGroups/(?P<host_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def kms_config_path(
         project: str,
@@ -2118,7 +2143,7 @@ def sample_create_volume():
                 volume.share_name = "share_name_value"
                 volume.storage_pool = "storage_pool_value"
                 volume.capacity_gib = 1247
-                volume.protocols = ['SMB']
+                volume.protocols = ['ISCSI']
 
                 request = netapp_v1.CreateVolumeRequest(
                     parent="parent_value",
@@ -2267,7 +2292,7 @@ def sample_update_volume():
                 volume.share_name = "share_name_value"
                 volume.storage_pool = "storage_pool_value"
                 volume.capacity_gib = 1247
-                volume.protocols = ['SMB']
+                volume.protocols = ['ISCSI']
 
                 request = netapp_v1.UpdateVolumeRequest(
                     volume=volume,
@@ -4135,7 +4160,7 @@ def sample_create_kms_config():
 
                 The result type for the operation will be :class:`google.cloud.netapp_v1.types.KmsConfig`
-                KmsConfig is the customer managed encryption key(CMEK)
+                KmsConfig is the customer-managed encryption key(CMEK)
                 configuration.
 
         """
@@ -4254,7 +4279,7 @@ def sample_get_kms_config():
 
         Returns:
             google.cloud.netapp_v1.types.KmsConfig:
-                KmsConfig is the customer managed
+                KmsConfig is the customer-managed
                 encryption key(CMEK) configuration.
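
A quick aside on the path helpers added in this hunk: they are plain static methods, so they can be exercised without a client instance. A minimal sketch (the project, location, and ID values are hypothetical):

.. code-block:: python

    from google.cloud import netapp_v1

    # Compose a fully-qualified host group resource name.
    name = netapp_v1.NetAppClient.host_group_path("my-project", "us-central1", "hg-1")
    # -> "projects/my-project/locations/us-central1/hostGroups/hg-1"

    # Parse it back into its component segments.
    segments = netapp_v1.NetAppClient.parse_host_group_path(name)
    # -> {"project": "my-project", "location": "us-central1", "host_group": "hg-1"}
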
""" @@ -4383,7 +4408,7 @@ def sample_update_kms_config(): The result type for the operation will be :class:`google.cloud.netapp_v1.types.KmsConfig` - KmsConfig is the customer managed encryption key(CMEK) + KmsConfig is the customer-managed encryption key(CMEK) configuration. """ @@ -4504,7 +4529,7 @@ def sample_encrypt_volumes(): The result type for the operation will be :class:`google.cloud.netapp_v1.types.KmsConfig` - KmsConfig is the customer managed encryption key(CMEK) + KmsConfig is the customer-managed encryption key(CMEK) configuration. """ @@ -8535,6 +8560,766 @@ def sample_delete_quota_rule(): # Done; return the response. return response + def restore_backup_files( + self, + request: Optional[Union[volume.RestoreBackupFilesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Restore files from a backup to a volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_restore_backup_files(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.RestoreBackupFilesRequest( + name="name_value", + backup="backup_value", + file_list=['file_list_value1', 'file_list_value2'], + ) + + # Make the request + operation = client.restore_backup_files(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.RestoreBackupFilesRequest, dict]): + The request object. RestoreBackupFilesRequest restores + files from a backup to a volume. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.RestoreBackupFilesResponse` + RestoreBackupFilesResponse is the result of + RestoreBackupFilesRequest. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, volume.RestoreBackupFilesRequest): + request = volume.RestoreBackupFilesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_backup_files] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + volume.RestoreBackupFilesResponse, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_host_groups( + self, + request: Optional[Union[host_group.ListHostGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListHostGroupsPager: + r"""Returns a list of host groups in a ``location``. Use ``-`` as + location to list host groups across all locations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_list_host_groups(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListHostGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_host_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.ListHostGroupsRequest, dict]): + The request object. ListHostGroupsRequest for listing + host groups. + parent (str): + Required. Parent value for + ListHostGroupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListHostGroupsPager: + ListHostGroupsResponse is the + response to a ListHostGroupsRequest. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, host_group.ListHostGroupsRequest): + request = host_group.ListHostGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_host_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListHostGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_host_group( + self, + request: Optional[Union[host_group.GetHostGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> host_group.HostGroup: + r"""Returns details of the specified host group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_get_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetHostGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_host_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.GetHostGroupRequest, dict]): + The request object. GetHostGroupRequest for getting a + host group. + name (str): + Required. The resource name of the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.netapp_v1.types.HostGroup: + Host group is a collection of hosts + that can be used for accessing a Block + Volume. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, host_group.GetHostGroupRequest): + request = host_group.GetHostGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_host_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_host_group( + self, + request: Optional[Union[gcn_host_group.CreateHostGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + host_group: Optional[gcn_host_group.HostGroup] = None, + host_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new host group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_create_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ['hosts_value1', 'hosts_value2'] + host_group.os_type = "ESXI" + + request = netapp_v1.CreateHostGroupRequest( + parent="parent_value", + host_group=host_group, + host_group_id="host_group_id_value", + ) + + # Make the request + operation = client.create_host_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.CreateHostGroupRequest, dict]): + The request object. CreateHostGroupRequest for creating a + host group. + parent (str): + Required. Parent value for + CreateHostGroupRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + host_group (google.cloud.netapp_v1.types.HostGroup): + Required. Fields of the host group to + create. + + This corresponds to the ``host_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + host_group_id (str): + Required. ID of the host group to + create. 
Must be unique within the parent
+                resource. Must contain only letters,
+                numbers, underscore and hyphen, with the first
+                character a letter or underscore, the
+                last a letter or underscore or a number,
+                and a 63 character maximum.
+
+                This corresponds to the ``host_group_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.netapp_v1.types.HostGroup` Host group is a collection of hosts that can be used for accessing a Block
+                Volume.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        flattened_params = [parent, host_group, host_group_id]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        # there are no flattened fields), or create one.
+        if not isinstance(request, gcn_host_group.CreateHostGroupRequest):
+            request = gcn_host_group.CreateHostGroupRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if host_group is not None:
+            request.host_group = host_group
+        if host_group_id is not None:
+            request.host_group_id = host_group_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_host_group]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            gcn_host_group.HostGroup,
+            metadata_type=cloud_netapp_service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_host_group(
+        self,
+        request: Optional[Union[gcn_host_group.UpdateHostGroupRequest, dict]] = None,
+        *,
+        host_group: Optional[gcn_host_group.HostGroup] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> operation.Operation:
+        r"""Updates an existing host group.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_update_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ['hosts_value1', 'hosts_value2'] + host_group.os_type = "ESXI" + + request = netapp_v1.UpdateHostGroupRequest( + host_group=host_group, + ) + + # Make the request + operation = client.update_host_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.UpdateHostGroupRequest, dict]): + The request object. UpdateHostGroupRequest for updating a + host group. + host_group (google.cloud.netapp_v1.types.HostGroup): + Required. The host group to update. The host group's + ``name`` field is used to identify the host group. + Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + + This corresponds to the ``host_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.netapp_v1.types.HostGroup` Host group is a collection of hosts that can be used for accessing a Block + Volume. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [host_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_host_group.UpdateHostGroupRequest): + request = gcn_host_group.UpdateHostGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if host_group is not None: + request.host_group = host_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_host_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("host_group.name", request.host_group.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_host_group.HostGroup, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_host_group( + self, + request: Optional[Union[host_group.DeleteHostGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a host group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_delete_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteHostGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_host_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.DeleteHostGroupRequest, dict]): + The request object. DeleteHostGroupRequest for deleting a + single host group. + name (str): + Required. The resource name of the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, host_group.DeleteHostGroupRequest): + request = host_group.DeleteHostGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_host_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "NetAppClient": return self diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py index 702f66428904..743a0cdd7c24 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py @@ -43,6 +43,7 @@ backup, backup_policy, backup_vault, + host_group, kms, quota_rule, replication, @@ -1614,3 +1615,159 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListHostGroupsPager: + """A pager for iterating through ``list_host_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListHostGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``host_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListHostGroups`` requests and continue to iterate + through the ``host_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListHostGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., host_group.ListHostGroupsResponse], + request: host_group.ListHostGroupsRequest, + response: host_group.ListHostGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.netapp_v1.types.ListHostGroupsRequest): + The initial request object. 
+ response (google.cloud.netapp_v1.types.ListHostGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = host_group.ListHostGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[host_group.ListHostGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[host_group.HostGroup]: + for page in self.pages: + yield from page.host_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListHostGroupsAsyncPager: + """A pager for iterating through ``list_host_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListHostGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``host_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListHostGroups`` requests and continue to iterate + through the ``host_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListHostGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[host_group.ListHostGroupsResponse]], + request: host_group.ListHostGroupsRequest, + response: host_group.ListHostGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.netapp_v1.types.ListHostGroupsRequest): + The initial request object. + response (google.cloud.netapp_v1.types.ListHostGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
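
As a usage sketch for these pagers (assuming the matching ``list_host_groups`` coroutine on ``NetAppAsyncClient``, whose hunk is not shown here; the parent value is hypothetical):

.. code-block:: python

    import asyncio

    from google.cloud import netapp_v1

    async def main():
        client = netapp_v1.NetAppAsyncClient()
        # Use "-" as the location to list host groups across all locations.
        pager = await client.list_host_groups(parent="projects/my-project/locations/-")
        async for hg in pager:  # the pager resolves additional pages transparently
            print(hg.name)

    asyncio.run(main())
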
+ """ + self._method = method + self._request = host_group.ListHostGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[host_group.ListHostGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[host_group.HostGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.host_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py index 0b4c327ef230..939828bc49e8 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py @@ -36,6 +36,8 @@ from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy from google.cloud.netapp_v1.types import backup_vault from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault +from google.cloud.netapp_v1.types import host_group +from google.cloud.netapp_v1.types import host_group as gcn_host_group from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import quota_rule from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule @@ -615,6 +617,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.restore_backup_files: gapic_v1.method.wrap_method( + self.restore_backup_files, + default_timeout=None, + client_info=client_info, + ), + self.list_host_groups: gapic_v1.method.wrap_method( + self.list_host_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_host_group: gapic_v1.method.wrap_method( + self.get_host_group, + default_timeout=None, + client_info=client_info, + ), + self.create_host_group: gapic_v1.method.wrap_method( + self.create_host_group, + default_timeout=None, + client_info=client_info, + ), + self.update_host_group: gapic_v1.method.wrap_method( + self.update_host_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_host_group: gapic_v1.method.wrap_method( + self.delete_host_group, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -1221,6 +1253,63 @@ def delete_quota_rule( ]: raise NotImplementedError() + @property + def restore_backup_files( + self, + ) -> Callable[ + [volume.RestoreBackupFilesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_host_groups( + self, + ) -> Callable[ + [host_group.ListHostGroupsRequest], + Union[ + host_group.ListHostGroupsResponse, + Awaitable[host_group.ListHostGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_host_group( + self, + ) -> Callable[ + [host_group.GetHostGroupRequest], + 
Union[host_group.HostGroup, Awaitable[host_group.HostGroup]], + ]: + raise NotImplementedError() + + @property + def create_host_group( + self, + ) -> Callable[ + [gcn_host_group.CreateHostGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_host_group( + self, + ) -> Callable[ + [gcn_host_group.UpdateHostGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_host_group( + self, + ) -> Callable[ + [host_group.DeleteHostGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py index 1790046d5038..46f149b9c7c0 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py @@ -38,6 +38,8 @@ from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy from google.cloud.netapp_v1.types import backup_vault from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault +from google.cloud.netapp_v1.types import host_group +from google.cloud.netapp_v1.types import host_group as gcn_host_group from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import quota_rule from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule @@ -1984,6 +1986,165 @@ def delete_quota_rule( ) return self._stubs["delete_quota_rule"] + @property + def restore_backup_files( + self, + ) -> Callable[[volume.RestoreBackupFilesRequest], operations_pb2.Operation]: + r"""Return a callable for the restore backup files method over gRPC. + + Restore files from a backup to a volume. + + Returns: + Callable[[~.RestoreBackupFilesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_backup_files" not in self._stubs: + self._stubs["restore_backup_files"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/RestoreBackupFiles", + request_serializer=volume.RestoreBackupFilesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup_files"] + + @property + def list_host_groups( + self, + ) -> Callable[ + [host_group.ListHostGroupsRequest], host_group.ListHostGroupsResponse + ]: + r"""Return a callable for the list host groups method over gRPC. + + Returns a list of host groups in a ``location``. Use ``-`` as + location to list host groups across all locations. + + Returns: + Callable[[~.ListHostGroupsRequest], + ~.ListHostGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_host_groups" not in self._stubs: + self._stubs["list_host_groups"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListHostGroups", + request_serializer=host_group.ListHostGroupsRequest.serialize, + response_deserializer=host_group.ListHostGroupsResponse.deserialize, + ) + return self._stubs["list_host_groups"] + + @property + def get_host_group( + self, + ) -> Callable[[host_group.GetHostGroupRequest], host_group.HostGroup]: + r"""Return a callable for the get host group method over gRPC. + + Returns details of the specified host group. + + Returns: + Callable[[~.GetHostGroupRequest], + ~.HostGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_host_group" not in self._stubs: + self._stubs["get_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetHostGroup", + request_serializer=host_group.GetHostGroupRequest.serialize, + response_deserializer=host_group.HostGroup.deserialize, + ) + return self._stubs["get_host_group"] + + @property + def create_host_group( + self, + ) -> Callable[[gcn_host_group.CreateHostGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the create host group method over gRPC. + + Creates a new host group. + + Returns: + Callable[[~.CreateHostGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_host_group" not in self._stubs: + self._stubs["create_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateHostGroup", + request_serializer=gcn_host_group.CreateHostGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_host_group"] + + @property + def update_host_group( + self, + ) -> Callable[[gcn_host_group.UpdateHostGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the update host group method over gRPC. + + Updates an existing host group. + + Returns: + Callable[[~.UpdateHostGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_host_group" not in self._stubs: + self._stubs["update_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateHostGroup", + request_serializer=gcn_host_group.UpdateHostGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_host_group"] + + @property + def delete_host_group( + self, + ) -> Callable[[host_group.DeleteHostGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete host group method over gRPC. + + Deletes a host group. + + Returns: + Callable[[~.DeleteHostGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_host_group" not in self._stubs: + self._stubs["delete_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteHostGroup", + request_serializer=host_group.DeleteHostGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_host_group"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py index 434b8008357d..11f2220722fd 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py @@ -41,6 +41,8 @@ from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy from google.cloud.netapp_v1.types import backup_vault from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault +from google.cloud.netapp_v1.types import host_group +from google.cloud.netapp_v1.types import host_group as gcn_host_group from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import quota_rule from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule @@ -2060,6 +2062,173 @@ def delete_quota_rule( ) return self._stubs["delete_quota_rule"] + @property + def restore_backup_files( + self, + ) -> Callable[ + [volume.RestoreBackupFilesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restore backup files method over gRPC. + + Restore files from a backup to a volume. + + Returns: + Callable[[~.RestoreBackupFilesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_backup_files" not in self._stubs: + self._stubs["restore_backup_files"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/RestoreBackupFiles", + request_serializer=volume.RestoreBackupFilesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup_files"] + + @property + def list_host_groups( + self, + ) -> Callable[ + [host_group.ListHostGroupsRequest], Awaitable[host_group.ListHostGroupsResponse] + ]: + r"""Return a callable for the list host groups method over gRPC. + + Returns a list of host groups in a ``location``. Use ``-`` as + location to list host groups across all locations. + + Returns: + Callable[[~.ListHostGroupsRequest], + Awaitable[~.ListHostGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_host_groups" not in self._stubs: + self._stubs["list_host_groups"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListHostGroups", + request_serializer=host_group.ListHostGroupsRequest.serialize, + response_deserializer=host_group.ListHostGroupsResponse.deserialize, + ) + return self._stubs["list_host_groups"] + + @property + def get_host_group( + self, + ) -> Callable[[host_group.GetHostGroupRequest], Awaitable[host_group.HostGroup]]: + r"""Return a callable for the get host group method over gRPC. + + Returns details of the specified host group. + + Returns: + Callable[[~.GetHostGroupRequest], + Awaitable[~.HostGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_host_group" not in self._stubs: + self._stubs["get_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetHostGroup", + request_serializer=host_group.GetHostGroupRequest.serialize, + response_deserializer=host_group.HostGroup.deserialize, + ) + return self._stubs["get_host_group"] + + @property + def create_host_group( + self, + ) -> Callable[ + [gcn_host_group.CreateHostGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create host group method over gRPC. + + Creates a new host group. + + Returns: + Callable[[~.CreateHostGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_host_group" not in self._stubs: + self._stubs["create_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateHostGroup", + request_serializer=gcn_host_group.CreateHostGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_host_group"] + + @property + def update_host_group( + self, + ) -> Callable[ + [gcn_host_group.UpdateHostGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update host group method over gRPC. + + Updates an existing host group. + + Returns: + Callable[[~.UpdateHostGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_host_group" not in self._stubs: + self._stubs["update_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateHostGroup", + request_serializer=gcn_host_group.UpdateHostGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_host_group"] + + @property + def delete_host_group( + self, + ) -> Callable[ + [host_group.DeleteHostGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete host group method over gRPC. + + Deletes a host group. + + Returns: + Callable[[~.DeleteHostGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
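
Taken together with the async client methods earlier in this diff, deleting a host group is an ordinary LRO round trip; a hedged sketch (the resource name is hypothetical):

.. code-block:: python

    import asyncio

    from google.cloud import netapp_v1

    async def main():
        client = netapp_v1.NetAppAsyncClient()
        operation = await client.delete_host_group(
            name="projects/my-project/locations/us-central1/hostGroups/hg-1"
        )
        # The operation resolves to google.protobuf.Empty on success.
        await operation.result()

    asyncio.run(main())
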
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_host_group" not in self._stubs: + self._stubs["delete_host_group"] = self._logged_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteHostGroup", + request_serializer=host_group.DeleteHostGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_host_group"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -2525,6 +2694,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.restore_backup_files: self._wrap_method( + self.restore_backup_files, + default_timeout=None, + client_info=client_info, + ), + self.list_host_groups: self._wrap_method( + self.list_host_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_host_group: self._wrap_method( + self.get_host_group, + default_timeout=None, + client_info=client_info, + ), + self.create_host_group: self._wrap_method( + self.create_host_group, + default_timeout=None, + client_info=client_info, + ), + self.update_host_group: self._wrap_method( + self.update_host_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_host_group: self._wrap_method( + self.delete_host_group, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py index a8742d069c2c..d3a58553ba06 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py @@ -38,6 +38,8 @@ from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy from google.cloud.netapp_v1.types import backup_vault from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault +from google.cloud.netapp_v1.types import host_group +from google.cloud.netapp_v1.types import host_group as gcn_host_group from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import quota_rule from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule @@ -124,6 +126,14 @@ def post_create_backup_vault(self, response): logging.log(f"Received response: {response}") return response + def pre_create_host_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_host_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_kms_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -204,6 +214,14 @@ def post_delete_backup_vault(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_host_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_host_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_kms_config(self, request, metadata): logging.log(f"Received request: 
{request}") return request, metadata @@ -300,6 +318,14 @@ def post_get_backup_vault(self, response): logging.log(f"Received response: {response}") return response + def pre_get_host_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_host_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_kms_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -380,6 +406,14 @@ def post_list_backup_vaults(self, response): logging.log(f"Received response: {response}") return response + def pre_list_host_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_host_groups(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_kms_configs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -428,6 +462,14 @@ def post_list_volumes(self, response): logging.log(f"Received response: {response}") return response + def pre_restore_backup_files(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_backup_files(self, response): + logging.log(f"Received response: {response}") + return response + def pre_resume_replication(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -508,6 +550,14 @@ def post_update_backup_vault(self, response): logging.log(f"Received response: {response}") return response + def pre_update_host_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_host_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_kms_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -771,6 +821,54 @@ def post_create_backup_vault_with_metadata( """ return response, metadata + def pre_create_host_group( + self, + request: gcn_host_group.CreateHostGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_host_group.CreateHostGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_host_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_create_host_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_host_group + + DEPRECATED. Please use the `post_create_host_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. This `post_create_host_group` interceptor runs + before the `post_create_host_group_with_metadata` interceptor. + """ + return response + + def post_create_host_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_host_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetApp server but before it is returned to user code. 
+ + We recommend only using this `post_create_host_group_with_metadata` + interceptor in new development instead of the `post_create_host_group` interceptor. + When both interceptors are used, this `post_create_host_group_with_metadata` interceptor runs after the + `post_create_host_group` interceptor. The (possibly modified) response returned by + `post_create_host_group` will be passed to + `post_create_host_group_with_metadata`. + """ + return response, metadata + def pre_create_kms_config( self, request: kms.CreateKmsConfigRequest, @@ -1248,6 +1346,54 @@ def post_delete_backup_vault_with_metadata( """ return response, metadata + def pre_delete_host_group( + self, + request: host_group.DeleteHostGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + host_group.DeleteHostGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_host_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_delete_host_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_host_group + + DEPRECATED. Please use the `post_delete_host_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. This `post_delete_host_group` interceptor runs + before the `post_delete_host_group_with_metadata` interceptor. + """ + return response + + def post_delete_host_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_host_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetApp server but before it is returned to user code. + + We recommend only using this `post_delete_host_group_with_metadata` + interceptor in new development instead of the `post_delete_host_group` interceptor. + When both interceptors are used, this `post_delete_host_group_with_metadata` interceptor runs after the + `post_delete_host_group` interceptor. The (possibly modified) response returned by + `post_delete_host_group` will be passed to + `post_delete_host_group_with_metadata`. + """ + return response, metadata + def pre_delete_kms_config( self, request: kms.DeleteKmsConfigRequest, @@ -1813,6 +1959,52 @@ def post_get_backup_vault_with_metadata( """ return response, metadata + def pre_get_host_group( + self, + request: host_group.GetHostGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[host_group.GetHostGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_host_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_get_host_group( + self, response: host_group.HostGroup + ) -> host_group.HostGroup: + """Post-rpc interceptor for get_host_group + + DEPRECATED. Please use the `post_get_host_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. This `post_get_host_group` interceptor runs + before the `post_get_host_group_with_metadata` interceptor. 
+ """ + return response + + def post_get_host_group_with_metadata( + self, + response: host_group.HostGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[host_group.HostGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_host_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetApp server but before it is returned to user code. + + We recommend only using this `post_get_host_group_with_metadata` + interceptor in new development instead of the `post_get_host_group` interceptor. + When both interceptors are used, this `post_get_host_group_with_metadata` interceptor runs after the + `post_get_host_group` interceptor. The (possibly modified) response returned by + `post_get_host_group` will be passed to + `post_get_host_group_with_metadata`. + """ + return response, metadata + def pre_get_kms_config( self, request: kms.GetKmsConfigRequest, @@ -2282,6 +2474,56 @@ def post_list_backup_vaults_with_metadata( """ return response, metadata + def pre_list_host_groups( + self, + request: host_group.ListHostGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + host_group.ListHostGroupsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_host_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_list_host_groups( + self, response: host_group.ListHostGroupsResponse + ) -> host_group.ListHostGroupsResponse: + """Post-rpc interceptor for list_host_groups + + DEPRECATED. Please use the `post_list_host_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. This `post_list_host_groups` interceptor runs + before the `post_list_host_groups_with_metadata` interceptor. + """ + return response + + def post_list_host_groups_with_metadata( + self, + response: host_group.ListHostGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + host_group.ListHostGroupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_host_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetApp server but before it is returned to user code. + + We recommend only using this `post_list_host_groups_with_metadata` + interceptor in new development instead of the `post_list_host_groups` interceptor. + When both interceptors are used, this `post_list_host_groups_with_metadata` interceptor runs after the + `post_list_host_groups` interceptor. The (possibly modified) response returned by + `post_list_host_groups` will be passed to + `post_list_host_groups_with_metadata`. + """ + return response, metadata + def pre_list_kms_configs( self, request: kms.ListKmsConfigsRequest, @@ -2570,6 +2812,54 @@ def post_list_volumes_with_metadata( """ return response, metadata + def pre_restore_backup_files( + self, + request: volume.RestoreBackupFilesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + volume.RestoreBackupFilesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for restore_backup_files + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. 
+ """ + return request, metadata + + def post_restore_backup_files( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_backup_files + + DEPRECATED. Please use the `post_restore_backup_files_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. This `post_restore_backup_files` interceptor runs + before the `post_restore_backup_files_with_metadata` interceptor. + """ + return response + + def post_restore_backup_files_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_backup_files + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetApp server but before it is returned to user code. + + We recommend only using this `post_restore_backup_files_with_metadata` + interceptor in new development instead of the `post_restore_backup_files` interceptor. + When both interceptors are used, this `post_restore_backup_files_with_metadata` interceptor runs after the + `post_restore_backup_files` interceptor. The (possibly modified) response returned by + `post_restore_backup_files` will be passed to + `post_restore_backup_files_with_metadata`. + """ + return response, metadata + def pre_resume_replication( self, request: replication.ResumeReplicationRequest, @@ -3051,6 +3341,54 @@ def post_update_backup_vault_with_metadata( """ return response, metadata + def pre_update_host_group( + self, + request: gcn_host_group.UpdateHostGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_host_group.UpdateHostGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_host_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_update_host_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_host_group + + DEPRECATED. Please use the `post_update_host_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. This `post_update_host_group` interceptor runs + before the `post_update_host_group_with_metadata` interceptor. + """ + return response + + def post_update_host_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_host_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetApp server but before it is returned to user code. + + We recommend only using this `post_update_host_group_with_metadata` + interceptor in new development instead of the `post_update_host_group` interceptor. + When both interceptors are used, this `post_update_host_group_with_metadata` interceptor runs after the + `post_update_host_group` interceptor. The (possibly modified) response returned by + `post_update_host_group` will be passed to + `post_update_host_group_with_metadata`. 
+ """ + return response, metadata + def pre_update_kms_config( self, request: kms.UpdateKmsConfigRequest, @@ -4345,11 +4683,11 @@ def __call__( ) return resp - class _CreateKmsConfig( - _BaseNetAppRestTransport._BaseCreateKmsConfig, NetAppRestStub + class _CreateHostGroup( + _BaseNetAppRestTransport._BaseCreateHostGroup, NetAppRestStub ): def __hash__(self): - return hash("NetAppRestTransport.CreateKmsConfig") + return hash("NetAppRestTransport.CreateHostGroup") @staticmethod def _get_response( @@ -4376,18 +4714,18 @@ def _get_response( def __call__( self, - request: kms.CreateKmsConfigRequest, + request: gcn_host_group.CreateHostGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the create kms config method over HTTP. + r"""Call the create host group method over HTTP. Args: - request (~.kms.CreateKmsConfigRequest): - The request object. CreateKmsConfigRequest creates a KMS - Config. + request (~.gcn_host_group.CreateHostGroupRequest): + The request object. CreateHostGroupRequest for creating a + host group. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4405,25 +4743,25 @@ def __call__( """ http_options = ( - _BaseNetAppRestTransport._BaseCreateKmsConfig._get_http_options() + _BaseNetAppRestTransport._BaseCreateHostGroup._get_http_options() ) - request, metadata = self._interceptor.pre_create_kms_config( + request, metadata = self._interceptor.pre_create_host_group( request, metadata ) transcoded_request = ( - _BaseNetAppRestTransport._BaseCreateKmsConfig._get_transcoded_request( + _BaseNetAppRestTransport._BaseCreateHostGroup._get_transcoded_request( http_options, request ) ) - body = _BaseNetAppRestTransport._BaseCreateKmsConfig._get_request_body_json( + body = _BaseNetAppRestTransport._BaseCreateHostGroup._get_request_body_json( transcoded_request ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseCreateKmsConfig._get_query_params_json( + _BaseNetAppRestTransport._BaseCreateHostGroup._get_query_params_json( transcoded_request ) ) @@ -4446,17 +4784,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.CreateKmsConfig", + f"Sending request for google.cloud.netapp_v1.NetAppClient.CreateHostGroup", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "CreateKmsConfig", + "rpcName": "CreateHostGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._CreateKmsConfig._get_response( + response = NetAppRestTransport._CreateHostGroup._get_response( self._host, metadata, query_params, @@ -4475,9 +4813,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_kms_config(resp) + resp = self._interceptor.post_create_host_group(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_kms_config_with_metadata( + resp, _ = self._interceptor.post_create_host_group_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -4493,21 +4831,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for 
google.cloud.netapp_v1.NetAppClient.create_kms_config", + "Received response for google.cloud.netapp_v1.NetAppClient.create_host_group", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "CreateKmsConfig", + "rpcName": "CreateHostGroup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _CreateQuotaRule( - _BaseNetAppRestTransport._BaseCreateQuotaRule, NetAppRestStub + class _CreateKmsConfig( + _BaseNetAppRestTransport._BaseCreateKmsConfig, NetAppRestStub ): def __hash__(self): - return hash("NetAppRestTransport.CreateQuotaRule") + return hash("NetAppRestTransport.CreateKmsConfig") @staticmethod def _get_response( @@ -4534,18 +4872,18 @@ def _get_response( def __call__( self, - request: gcn_quota_rule.CreateQuotaRuleRequest, + request: kms.CreateKmsConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the create quota rule method over HTTP. + r"""Call the create kms config method over HTTP. Args: - request (~.gcn_quota_rule.CreateQuotaRuleRequest): - The request object. CreateQuotaRuleRequest for creating a - quota rule. + request (~.kms.CreateKmsConfigRequest): + The request object. CreateKmsConfigRequest creates a KMS + Config. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4563,14 +4901,172 @@ def __call__( """ http_options = ( - _BaseNetAppRestTransport._BaseCreateQuotaRule._get_http_options() + _BaseNetAppRestTransport._BaseCreateKmsConfig._get_http_options() ) - request, metadata = self._interceptor.pre_create_quota_rule( + request, metadata = self._interceptor.pre_create_kms_config( request, metadata ) transcoded_request = ( - _BaseNetAppRestTransport._BaseCreateQuotaRule._get_transcoded_request( + _BaseNetAppRestTransport._BaseCreateKmsConfig._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseNetAppRestTransport._BaseCreateKmsConfig._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseNetAppRestTransport._BaseCreateKmsConfig._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.netapp_v1.NetAppClient.CreateKmsConfig", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "CreateKmsConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetAppRestTransport._CreateKmsConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
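# An illustrative sketch (editorial, not generated code) of what the status
# check below means for callers: when the response comes back with a status
# of 400 or above, google.api_core.exceptions.from_http_response() raises the
# matching GoogleAPICallError subclass (404 -> NotFound, 403 ->
# PermissionDenied, and so on), so user code can catch specific failures.
# The resource name is illustrative; transport="rest" selects this transport.
from google.api_core import exceptions as core_exceptions
from google.cloud import netapp_v1

client = netapp_v1.NetAppClient(transport="rest")
try:
    config = client.get_kms_config(
        name="projects/my-project/locations/us-central1/kmsConfigs/my-config"
    )
except core_exceptions.NotFound:
    # A 404 from the service, mapped by from_http_response().
    print("KMS config does not exist")
except core_exceptions.GoogleAPICallError as exc:
    # Base class for every other HTTP error mapping (403, 429, 500, ...).
    print(f"RPC failed: {exc.code} {exc.message}")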
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_kms_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_kms_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.netapp_v1.NetAppClient.create_kms_config", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "CreateKmsConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateQuotaRule( + _BaseNetAppRestTransport._BaseCreateQuotaRule, NetAppRestStub + ): + def __hash__(self): + return hash("NetAppRestTransport.CreateQuotaRule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_quota_rule.CreateQuotaRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create quota rule method over HTTP. + + Args: + request (~.gcn_quota_rule.CreateQuotaRuleRequest): + The request object. CreateQuotaRuleRequest for creating a + quota rule. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetAppRestTransport._BaseCreateQuotaRule._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_quota_rule( + request, metadata + ) + transcoded_request = ( + _BaseNetAppRestTransport._BaseCreateQuotaRule._get_transcoded_request( http_options, request ) ) @@ -5885,11 +6381,11 @@ def __call__( ) return resp - class _DeleteKmsConfig( - _BaseNetAppRestTransport._BaseDeleteKmsConfig, NetAppRestStub + class _DeleteHostGroup( + _BaseNetAppRestTransport._BaseDeleteHostGroup, NetAppRestStub ): def __hash__(self): - return hash("NetAppRestTransport.DeleteKmsConfig") + return hash("NetAppRestTransport.DeleteHostGroup") @staticmethod def _get_response( @@ -5915,18 +6411,18 @@ def _get_response( def __call__( self, - request: kms.DeleteKmsConfigRequest, + request: host_group.DeleteHostGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the delete kms config method over HTTP. + r"""Call the delete host group method over HTTP. Args: - request (~.kms.DeleteKmsConfigRequest): - The request object. DeleteKmsConfigRequest deletes a KMS - Config. + request (~.host_group.DeleteHostGroupRequest): + The request object. DeleteHostGroupRequest for deleting a + single host group. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5944,21 +6440,21 @@ def __call__( """ http_options = ( - _BaseNetAppRestTransport._BaseDeleteKmsConfig._get_http_options() + _BaseNetAppRestTransport._BaseDeleteHostGroup._get_http_options() ) - request, metadata = self._interceptor.pre_delete_kms_config( + request, metadata = self._interceptor.pre_delete_host_group( request, metadata ) transcoded_request = ( - _BaseNetAppRestTransport._BaseDeleteKmsConfig._get_transcoded_request( + _BaseNetAppRestTransport._BaseDeleteHostGroup._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseDeleteKmsConfig._get_query_params_json( + _BaseNetAppRestTransport._BaseDeleteHostGroup._get_query_params_json( transcoded_request ) ) @@ -5981,17 +6477,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.DeleteKmsConfig", + f"Sending request for google.cloud.netapp_v1.NetAppClient.DeleteHostGroup", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "DeleteKmsConfig", + "rpcName": "DeleteHostGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._DeleteKmsConfig._get_response( + response = NetAppRestTransport._DeleteHostGroup._get_response( self._host, metadata, query_params, @@ -6009,9 +6505,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_kms_config(resp) + resp = self._interceptor.post_delete_host_group(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_kms_config_with_metadata( + resp, _ = self._interceptor.post_delete_host_group_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -6027,21 +6523,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for 
google.cloud.netapp_v1.NetAppClient.delete_kms_config", + "Received response for google.cloud.netapp_v1.NetAppClient.delete_host_group", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "DeleteKmsConfig", + "rpcName": "DeleteHostGroup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteQuotaRule( - _BaseNetAppRestTransport._BaseDeleteQuotaRule, NetAppRestStub + class _DeleteKmsConfig( + _BaseNetAppRestTransport._BaseDeleteKmsConfig, NetAppRestStub ): def __hash__(self): - return hash("NetAppRestTransport.DeleteQuotaRule") + return hash("NetAppRestTransport.DeleteKmsConfig") @staticmethod def _get_response( @@ -6067,18 +6563,18 @@ def _get_response( def __call__( self, - request: quota_rule.DeleteQuotaRuleRequest, + request: kms.DeleteKmsConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the delete quota rule method over HTTP. + r"""Call the delete kms config method over HTTP. Args: - request (~.quota_rule.DeleteQuotaRuleRequest): - The request object. DeleteQuotaRuleRequest for deleting a - single quota rule. + request (~.kms.DeleteKmsConfigRequest): + The request object. DeleteKmsConfigRequest deletes a KMS + Config. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -6096,21 +6592,21 @@ def __call__( """ http_options = ( - _BaseNetAppRestTransport._BaseDeleteQuotaRule._get_http_options() + _BaseNetAppRestTransport._BaseDeleteKmsConfig._get_http_options() ) - request, metadata = self._interceptor.pre_delete_quota_rule( + request, metadata = self._interceptor.pre_delete_kms_config( request, metadata ) transcoded_request = ( - _BaseNetAppRestTransport._BaseDeleteQuotaRule._get_transcoded_request( + _BaseNetAppRestTransport._BaseDeleteKmsConfig._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseDeleteQuotaRule._get_query_params_json( + _BaseNetAppRestTransport._BaseDeleteKmsConfig._get_query_params_json( transcoded_request ) ) @@ -6133,17 +6629,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.DeleteQuotaRule", + f"Sending request for google.cloud.netapp_v1.NetAppClient.DeleteKmsConfig", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "DeleteQuotaRule", + "rpcName": "DeleteKmsConfig", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._DeleteQuotaRule._get_response( + response = NetAppRestTransport._DeleteKmsConfig._get_response( self._host, metadata, query_params, @@ -6161,9 +6657,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_quota_rule(resp) + resp = self._interceptor.post_delete_kms_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_quota_rule_with_metadata( + resp, _ = self._interceptor.post_delete_kms_config_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -6179,21 +6675,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for 
google.cloud.netapp_v1.NetAppClient.delete_quota_rule", + "Received response for google.cloud.netapp_v1.NetAppClient.delete_kms_config", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "DeleteQuotaRule", + "rpcName": "DeleteKmsConfig", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteReplication( - _BaseNetAppRestTransport._BaseDeleteReplication, NetAppRestStub + class _DeleteQuotaRule( + _BaseNetAppRestTransport._BaseDeleteQuotaRule, NetAppRestStub ): def __hash__(self): - return hash("NetAppRestTransport.DeleteReplication") + return hash("NetAppRestTransport.DeleteQuotaRule") @staticmethod def _get_response( @@ -6219,18 +6715,18 @@ def _get_response( def __call__( self, - request: replication.DeleteReplicationRequest, + request: quota_rule.DeleteQuotaRuleRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the delete replication method over HTTP. + r"""Call the delete quota rule method over HTTP. Args: - request (~.replication.DeleteReplicationRequest): - The request object. DeleteReplicationRequest deletes a - replication. + request (~.quota_rule.DeleteQuotaRuleRequest): + The request object. DeleteQuotaRuleRequest for deleting a + single quota rule. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -6248,21 +6744,21 @@ def __call__( """ http_options = ( - _BaseNetAppRestTransport._BaseDeleteReplication._get_http_options() + _BaseNetAppRestTransport._BaseDeleteQuotaRule._get_http_options() ) - request, metadata = self._interceptor.pre_delete_replication( + request, metadata = self._interceptor.pre_delete_quota_rule( request, metadata ) transcoded_request = ( - _BaseNetAppRestTransport._BaseDeleteReplication._get_transcoded_request( + _BaseNetAppRestTransport._BaseDeleteQuotaRule._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseDeleteReplication._get_query_params_json( + _BaseNetAppRestTransport._BaseDeleteQuotaRule._get_query_params_json( transcoded_request ) ) @@ -6285,17 +6781,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.DeleteReplication", + f"Sending request for google.cloud.netapp_v1.NetAppClient.DeleteQuotaRule", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "DeleteReplication", + "rpcName": "DeleteQuotaRule", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._DeleteReplication._get_response( + response = NetAppRestTransport._DeleteQuotaRule._get_response( self._host, metadata, query_params, @@ -6313,9 +6809,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_replication(resp) + resp = self._interceptor.post_delete_quota_rule(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_replication_with_metadata( + resp, _ = self._interceptor.post_delete_quota_rule_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -6331,19 +6827,171 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for 
google.cloud.netapp_v1.NetAppClient.delete_replication", + "Received response for google.cloud.netapp_v1.NetAppClient.delete_quota_rule", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "DeleteReplication", + "rpcName": "DeleteQuotaRule", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteSnapshot(_BaseNetAppRestTransport._BaseDeleteSnapshot, NetAppRestStub): - def __hash__(self): - return hash("NetAppRestTransport.DeleteSnapshot") + class _DeleteReplication( + _BaseNetAppRestTransport._BaseDeleteReplication, NetAppRestStub + ): + def __hash__(self): + return hash("NetAppRestTransport.DeleteReplication") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: replication.DeleteReplicationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete replication method over HTTP. + + Args: + request (~.replication.DeleteReplicationRequest): + The request object. DeleteReplicationRequest deletes a + replication. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetAppRestTransport._BaseDeleteReplication._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_replication( + request, metadata + ) + transcoded_request = ( + _BaseNetAppRestTransport._BaseDeleteReplication._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseNetAppRestTransport._BaseDeleteReplication._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.netapp_v1.NetAppClient.DeleteReplication", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "DeleteReplication", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetAppRestTransport._DeleteReplication._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_replication(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_replication_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.netapp_v1.NetAppClient.delete_replication", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "DeleteReplication", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteSnapshot(_BaseNetAppRestTransport._BaseDeleteSnapshot, NetAppRestStub): + def __hash__(self): + return hash("NetAppRestTransport.DeleteSnapshot") @staticmethod def _get_response( @@ -7702,9 +8350,9 @@ def __call__( ) return resp - class _GetKmsConfig(_BaseNetAppRestTransport._BaseGetKmsConfig, NetAppRestStub): + class _GetHostGroup(_BaseNetAppRestTransport._BaseGetHostGroup, NetAppRestStub): def __hash__(self): - return hash("NetAppRestTransport.GetKmsConfig") + return hash("NetAppRestTransport.GetHostGroup") @staticmethod def _get_response( @@ -7730,18 +8378,18 @@ def _get_response( def __call__( self, - request: kms.GetKmsConfigRequest, + request: host_group.GetHostGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> kms.KmsConfig: - r"""Call the get kms config method over HTTP. 
+ ) -> host_group.HostGroup: + r"""Call the get host group method over HTTP. Args: - request (~.kms.GetKmsConfigRequest): - The request object. GetKmsConfigRequest gets a KMS - Config. + request (~.host_group.GetHostGroupRequest): + The request object. GetHostGroupRequest for getting a + host group. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -7751,26 +8399,27 @@ def __call__( be of type `bytes`. Returns: - ~.kms.KmsConfig: - KmsConfig is the customer managed - encryption key(CMEK) configuration. + ~.host_group.HostGroup: + Host group is a collection of hosts + that can be used for accessing a Block + Volume. """ http_options = ( - _BaseNetAppRestTransport._BaseGetKmsConfig._get_http_options() + _BaseNetAppRestTransport._BaseGetHostGroup._get_http_options() ) - request, metadata = self._interceptor.pre_get_kms_config(request, metadata) + request, metadata = self._interceptor.pre_get_host_group(request, metadata) transcoded_request = ( - _BaseNetAppRestTransport._BaseGetKmsConfig._get_transcoded_request( + _BaseNetAppRestTransport._BaseGetHostGroup._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseGetKmsConfig._get_query_params_json( + _BaseNetAppRestTransport._BaseGetHostGroup._get_query_params_json( transcoded_request ) ) @@ -7793,17 +8442,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.GetKmsConfig", + f"Sending request for google.cloud.netapp_v1.NetAppClient.GetHostGroup", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "GetKmsConfig", + "rpcName": "GetHostGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._GetKmsConfig._get_response( + response = NetAppRestTransport._GetHostGroup._get_response( self._host, metadata, query_params, @@ -7818,21 +8467,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = kms.KmsConfig() - pb_resp = kms.KmsConfig.pb(resp) + resp = host_group.HostGroup() + pb_resp = host_group.HostGroup.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_kms_config(resp) + resp = self._interceptor.post_get_host_group(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_kms_config_with_metadata( + resp, _ = self._interceptor.post_get_host_group_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = kms.KmsConfig.to_json(response) + response_payload = host_group.HostGroup.to_json(response) except: response_payload = None http_response = { @@ -7841,19 +8490,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.netapp_v1.NetAppClient.get_kms_config", + "Received response for google.cloud.netapp_v1.NetAppClient.get_host_group", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "GetKmsConfig", + "rpcName": "GetHostGroup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetQuotaRule(_BaseNetAppRestTransport._BaseGetQuotaRule, NetAppRestStub): + class _GetKmsConfig(_BaseNetAppRestTransport._BaseGetKmsConfig, NetAppRestStub): def __hash__(self): - 
return hash("NetAppRestTransport.GetQuotaRule") + return hash("NetAppRestTransport.GetKmsConfig") @staticmethod def _get_response( @@ -7879,18 +8528,18 @@ def _get_response( def __call__( self, - request: quota_rule.GetQuotaRuleRequest, + request: kms.GetKmsConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> quota_rule.QuotaRule: - r"""Call the get quota rule method over HTTP. + ) -> kms.KmsConfig: + r"""Call the get kms config method over HTTP. Args: - request (~.quota_rule.GetQuotaRuleRequest): - The request object. GetQuotaRuleRequest for getting a - quota rule. + request (~.kms.GetKmsConfigRequest): + The request object. GetKmsConfigRequest gets a KMS + Config. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -7900,28 +8549,26 @@ def __call__( be of type `bytes`. Returns: - ~.quota_rule.QuotaRule: - QuotaRule specifies the maximum disk - space a user or group can use within a - volume. They can be used for creating - default and individual quota rules. + ~.kms.KmsConfig: + KmsConfig is the customer-managed + encryption key(CMEK) configuration. """ http_options = ( - _BaseNetAppRestTransport._BaseGetQuotaRule._get_http_options() + _BaseNetAppRestTransport._BaseGetKmsConfig._get_http_options() ) - request, metadata = self._interceptor.pre_get_quota_rule(request, metadata) + request, metadata = self._interceptor.pre_get_kms_config(request, metadata) transcoded_request = ( - _BaseNetAppRestTransport._BaseGetQuotaRule._get_transcoded_request( + _BaseNetAppRestTransport._BaseGetKmsConfig._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseGetQuotaRule._get_query_params_json( + _BaseNetAppRestTransport._BaseGetKmsConfig._get_query_params_json( transcoded_request ) ) @@ -7944,17 +8591,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.GetQuotaRule", + f"Sending request for google.cloud.netapp_v1.NetAppClient.GetKmsConfig", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "GetQuotaRule", + "rpcName": "GetKmsConfig", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._GetQuotaRule._get_response( + response = NetAppRestTransport._GetKmsConfig._get_response( self._host, metadata, query_params, @@ -7969,21 +8616,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = quota_rule.QuotaRule() - pb_resp = quota_rule.QuotaRule.pb(resp) + resp = kms.KmsConfig() + pb_resp = kms.KmsConfig.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_quota_rule(resp) + resp = self._interceptor.post_get_kms_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_quota_rule_with_metadata( + resp, _ = self._interceptor.post_get_kms_config_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = quota_rule.QuotaRule.to_json(response) + response_payload = kms.KmsConfig.to_json(response) except: response_payload = None http_response = { @@ -7992,19 +8639,19 @@ def __call__( "status": response.status_code, } 
_LOGGER.debug( - "Received response for google.cloud.netapp_v1.NetAppClient.get_quota_rule", + "Received response for google.cloud.netapp_v1.NetAppClient.get_kms_config", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "GetQuotaRule", + "rpcName": "GetKmsConfig", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetReplication(_BaseNetAppRestTransport._BaseGetReplication, NetAppRestStub): + class _GetQuotaRule(_BaseNetAppRestTransport._BaseGetQuotaRule, NetAppRestStub): def __hash__(self): - return hash("NetAppRestTransport.GetReplication") + return hash("NetAppRestTransport.GetQuotaRule") @staticmethod def _get_response( @@ -8030,18 +8677,18 @@ def _get_response( def __call__( self, - request: replication.GetReplicationRequest, + request: quota_rule.GetQuotaRuleRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> replication.Replication: - r"""Call the get replication method over HTTP. + ) -> quota_rule.QuotaRule: + r"""Call the get quota rule method over HTTP. Args: - request (~.replication.GetReplicationRequest): - The request object. GetReplicationRequest gets the state - of a replication. + request (~.quota_rule.GetQuotaRuleRequest): + The request object. GetQuotaRuleRequest for getting a + quota rule. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -8051,11 +8698,162 @@ def __call__( be of type `bytes`. Returns: - ~.replication.Replication: - Replication is a nested resource - under Volume, that describes a - cross-region replication relationship - between 2 volumes in different regions. + ~.quota_rule.QuotaRule: + QuotaRule specifies the maximum disk + space a user or group can use within a + volume. They can be used for creating + default and individual quota rules. + + """ + + http_options = ( + _BaseNetAppRestTransport._BaseGetQuotaRule._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_quota_rule(request, metadata) + transcoded_request = ( + _BaseNetAppRestTransport._BaseGetQuotaRule._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseNetAppRestTransport._BaseGetQuotaRule._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.netapp_v1.NetAppClient.GetQuotaRule", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "GetQuotaRule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetAppRestTransport._GetQuotaRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = quota_rule.QuotaRule() + pb_resp = quota_rule.QuotaRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_quota_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_quota_rule_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = quota_rule.QuotaRule.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.netapp_v1.NetAppClient.get_quota_rule", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "GetQuotaRule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetReplication(_BaseNetAppRestTransport._BaseGetReplication, NetAppRestStub): + def __hash__(self): + return hash("NetAppRestTransport.GetReplication") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: replication.GetReplicationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> replication.Replication: + r"""Call the get replication method over HTTP. + + Args: + request (~.replication.GetReplicationRequest): + The request object. GetReplicationRequest gets the state + of a replication. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.replication.Replication: + Replication is a nested resource + under Volume, that describes a + cross-region replication relationship + between 2 volumes in different regions. 
""" @@ -9207,6 +10005,159 @@ def __call__( ) return resp + class _ListHostGroups(_BaseNetAppRestTransport._BaseListHostGroups, NetAppRestStub): + def __hash__(self): + return hash("NetAppRestTransport.ListHostGroups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: host_group.ListHostGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> host_group.ListHostGroupsResponse: + r"""Call the list host groups method over HTTP. + + Args: + request (~.host_group.ListHostGroupsRequest): + The request object. ListHostGroupsRequest for listing + host groups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.host_group.ListHostGroupsResponse: + ListHostGroupsResponse is the + response to a ListHostGroupsRequest. + + """ + + http_options = ( + _BaseNetAppRestTransport._BaseListHostGroups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_host_groups( + request, metadata + ) + transcoded_request = ( + _BaseNetAppRestTransport._BaseListHostGroups._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseNetAppRestTransport._BaseListHostGroups._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.netapp_v1.NetAppClient.ListHostGroups", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "ListHostGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetAppRestTransport._ListHostGroups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = host_group.ListHostGroupsResponse() + pb_resp = host_group.ListHostGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_host_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_host_groups_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = host_group.ListHostGroupsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.netapp_v1.NetAppClient.list_host_groups", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "ListHostGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _ListKmsConfigs(_BaseNetAppRestTransport._BaseListKmsConfigs, NetAppRestStub): def __hash__(self): return hash("NetAppRestTransport.ListKmsConfigs") @@ -9870,22 +10821,171 @@ def __call__( """ - http_options = ( - _BaseNetAppRestTransport._BaseListStoragePools._get_http_options() - ) + http_options = ( + _BaseNetAppRestTransport._BaseListStoragePools._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_storage_pools( + request, metadata + ) + transcoded_request = ( + _BaseNetAppRestTransport._BaseListStoragePools._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseNetAppRestTransport._BaseListStoragePools._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.netapp_v1.NetAppClient.ListStoragePools", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "ListStoragePools", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetAppRestTransport._ListStoragePools._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
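# A sketch (editorial, with assumptions) of how the pre_* / post_* /
# post_*_with_metadata hooks documented earlier compose: pre runs before the
# HTTP request is built and sent, post runs on the parsed response, and
# post_*_with_metadata runs last and also sees the response headers. The
# NetAppRestInterceptor base-class name and the transport's interceptor=
# keyword follow the usual GAPIC REST conventions and are assumptions here,
# not shown in this diff.
from google.cloud import netapp_v1
from google.cloud.netapp_v1.services.net_app.transports.rest import (
    NetAppRestInterceptor,
    NetAppRestTransport,
)


class AuditingInterceptor(NetAppRestInterceptor):
    def pre_list_storage_pools(self, request, metadata):
        # Runs before transcoding; may rewrite the request or metadata.
        return request, tuple(metadata) + (("x-goog-audit-tag", "example"),)

    def post_list_storage_pools_with_metadata(self, response, metadata):
        # Runs after post_list_storage_pools, with the response headers.
        print(f"listed {len(response.storage_pools)} storage pools")
        return response, metadata


client = netapp_v1.NetAppClient(
    transport=NetAppRestTransport(interceptor=AuditingInterceptor())
)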
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storage_pool.ListStoragePoolsResponse() + pb_resp = storage_pool.ListStoragePoolsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_storage_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_storage_pools_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = storage_pool.ListStoragePoolsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.netapp_v1.NetAppClient.list_storage_pools", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "ListStoragePools", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListVolumes(_BaseNetAppRestTransport._BaseListVolumes, NetAppRestStub): + def __hash__(self): + return hash("NetAppRestTransport.ListVolumes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: volume.ListVolumesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> volume.ListVolumesResponse: + r"""Call the list volumes method over HTTP. + + Args: + request (~.volume.ListVolumesRequest): + The request object. Message for requesting list of + Volumes + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.volume.ListVolumesResponse: + Message for response to listing + Volumes + + """ + + http_options = _BaseNetAppRestTransport._BaseListVolumes._get_http_options() - request, metadata = self._interceptor.pre_list_storage_pools( - request, metadata - ) + request, metadata = self._interceptor.pre_list_volumes(request, metadata) transcoded_request = ( - _BaseNetAppRestTransport._BaseListStoragePools._get_transcoded_request( + _BaseNetAppRestTransport._BaseListVolumes._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseListStoragePools._get_query_params_json( + _BaseNetAppRestTransport._BaseListVolumes._get_query_params_json( transcoded_request ) ) @@ -9908,17 +11008,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.ListStoragePools", + f"Sending request for google.cloud.netapp_v1.NetAppClient.ListVolumes", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "ListStoragePools", + "rpcName": "ListVolumes", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._ListStoragePools._get_response( + response = NetAppRestTransport._ListVolumes._get_response( self._host, metadata, query_params, @@ -9933,23 +11033,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = storage_pool.ListStoragePoolsResponse() - pb_resp = storage_pool.ListStoragePoolsResponse.pb(resp) + resp = volume.ListVolumesResponse() + pb_resp = volume.ListVolumesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_storage_pools(resp) + resp = self._interceptor.post_list_volumes(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_storage_pools_with_metadata( + resp, _ = self._interceptor.post_list_volumes_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = storage_pool.ListStoragePoolsResponse.to_json( - response - ) + response_payload = volume.ListVolumesResponse.to_json(response) except: response_payload = None http_response = { @@ -9958,19 +11056,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.netapp_v1.NetAppClient.list_storage_pools", + "Received response for google.cloud.netapp_v1.NetAppClient.list_volumes", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "ListStoragePools", + "rpcName": "ListVolumes", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListVolumes(_BaseNetAppRestTransport._BaseListVolumes, NetAppRestStub): + class _RestoreBackupFiles( + _BaseNetAppRestTransport._BaseRestoreBackupFiles, NetAppRestStub + ): def __hash__(self): - return hash("NetAppRestTransport.ListVolumes") + return hash("NetAppRestTransport.RestoreBackupFiles") @staticmethod def _get_response( @@ -9991,23 +11091,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: volume.ListVolumesRequest, + request: volume.RestoreBackupFilesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), - ) -> volume.ListVolumesResponse: - r"""Call the list volumes method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the restore backup files method over HTTP. Args: - request (~.volume.ListVolumesRequest): - The request object. Message for requesting list of - Volumes + request (~.volume.RestoreBackupFilesRequest): + The request object. RestoreBackupFilesRequest restores + files from a backup to a volume. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -10017,24 +11118,33 @@ def __call__( be of type `bytes`. Returns: - ~.volume.ListVolumesResponse: - Message for response to listing - Volumes + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options = _BaseNetAppRestTransport._BaseListVolumes._get_http_options() + http_options = ( + _BaseNetAppRestTransport._BaseRestoreBackupFiles._get_http_options() + ) - request, metadata = self._interceptor.pre_list_volumes(request, metadata) - transcoded_request = ( - _BaseNetAppRestTransport._BaseListVolumes._get_transcoded_request( - http_options, request + request, metadata = self._interceptor.pre_restore_backup_files( + request, metadata + ) + transcoded_request = _BaseNetAppRestTransport._BaseRestoreBackupFiles._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseNetAppRestTransport._BaseRestoreBackupFiles._get_request_body_json( + transcoded_request ) ) # Jsonify the query params query_params = ( - _BaseNetAppRestTransport._BaseListVolumes._get_query_params_json( + _BaseNetAppRestTransport._BaseRestoreBackupFiles._get_query_params_json( transcoded_request ) ) @@ -10047,7 +11157,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -10057,23 +11167,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.netapp_v1.NetAppClient.ListVolumes", + f"Sending request for google.cloud.netapp_v1.NetAppClient.RestoreBackupFiles", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "ListVolumes", + "rpcName": "RestoreBackupFiles", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetAppRestTransport._ListVolumes._get_response( + response = NetAppRestTransport._RestoreBackupFiles._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -10082,21 +11193,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = volume.ListVolumesResponse() - pb_resp = volume.ListVolumesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_volumes(resp) + resp = self._interceptor.post_restore_backup_files(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_volumes_with_metadata( + resp, _ = self._interceptor.post_restore_backup_files_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER try: - response_payload = volume.ListVolumesResponse.to_json(response) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -10105,10 +11214,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.netapp_v1.NetAppClient.list_volumes", + "Received response for google.cloud.netapp_v1.NetAppClient.restore_backup_files", extra={ "serviceName": "google.cloud.netapp.v1.NetApp", - "rpcName": "ListVolumes", + "rpcName": "RestoreBackupFiles", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -11688,6 +12797,164 @@ def __call__( ) return resp + class _UpdateHostGroup( + _BaseNetAppRestTransport._BaseUpdateHostGroup, NetAppRestStub + ): + def __hash__(self): + return hash("NetAppRestTransport.UpdateHostGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_host_group.UpdateHostGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update host group method over HTTP. + + Args: + request (~.gcn_host_group.UpdateHostGroupRequest): + The request object. UpdateHostGroupRequest for updating a + host group. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetAppRestTransport._BaseUpdateHostGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_host_group( + request, metadata + ) + transcoded_request = ( + _BaseNetAppRestTransport._BaseUpdateHostGroup._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseNetAppRestTransport._BaseUpdateHostGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseNetAppRestTransport._BaseUpdateHostGroup._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.netapp_v1.NetAppClient.UpdateHostGroup", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "UpdateHostGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetAppRestTransport._UpdateHostGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_host_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_host_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.netapp_v1.NetAppClient.update_host_group", + extra={ + "serviceName": "google.cloud.netapp.v1.NetApp", + "rpcName": "UpdateHostGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _UpdateKmsConfig( _BaseNetAppRestTransport._BaseUpdateKmsConfig, NetAppRestStub ): @@ -12987,6 +14254,14 @@ def create_backup_vault( # In C++ this would require a dynamic_cast return self._CreateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property + def create_host_group( + self, + ) -> Callable[[gcn_host_group.CreateHostGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateHostGroup(self._session, self._host, self._interceptor) # type: ignore + @property def create_kms_config( self, @@ -13071,6 +14346,14 @@ def delete_backup_vault( # In C++ this would require a dynamic_cast return self._DeleteBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_host_group( + self, + ) -> Callable[[host_group.DeleteHostGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteHostGroup(self._session, self._host, self._interceptor) # type: ignore + @property def delete_kms_config( self, @@ -13167,6 +14450,14 @@ def get_backup_vault( # In C++ this would require a dynamic_cast return self._GetBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property + def get_host_group( + self, + ) -> Callable[[host_group.GetHostGroupRequest], host_group.HostGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetHostGroup(self._session, self._host, self._interceptor) # type: ignore + @property def get_kms_config(self) -> Callable[[kms.GetKmsConfigRequest], kms.KmsConfig]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -13251,6 +14542,16 @@ def list_backup_vaults( # In C++ this would require a dynamic_cast return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore + @property + def list_host_groups( + self, + ) -> Callable[ + [host_group.ListHostGroupsRequest], host_group.ListHostGroupsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListHostGroups(self._session, self._host, self._interceptor) # type: ignore + @property def list_kms_configs( self, @@ -13305,6 +14606,14 @@ def list_volumes( # In C++ this would require a dynamic_cast return self._ListVolumes(self._session, self._host, self._interceptor) # type: ignore + @property + def restore_backup_files( + self, + ) -> Callable[[volume.RestoreBackupFilesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreBackupFiles(self._session, self._host, self._interceptor) # type: ignore + @property def resume_replication( self, @@ -13395,6 +14704,14 @@ def update_backup_vault( # In C++ this would require a dynamic_cast return self._UpdateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property + def update_host_group( + self, + ) -> Callable[[gcn_host_group.UpdateHostGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
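+        # Each property access constructs a fresh stub bound to the
+        # transport's shared session, host, and interceptor.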
+ # In C++ this would require a dynamic_cast + return self._UpdateHostGroup(self._session, self._host, self._interceptor) # type: ignore + @property def update_kms_config( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest_base.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest_base.py index 578011993cc0..b7a36c25a296 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest_base.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest_base.py @@ -30,6 +30,8 @@ from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy from google.cloud.netapp_v1.types import backup_vault from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault +from google.cloud.netapp_v1.types import host_group +from google.cloud.netapp_v1.types import host_group as gcn_host_group from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import quota_rule from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule @@ -343,6 +345,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateHostGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "hostGroupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/hostGroups", + "body": "host_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_host_group.CreateHostGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetAppRestTransport._BaseCreateHostGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateKmsConfig: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -885,6 +946,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteHostGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/hostGroups/*}", + }, + ] + 
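+            # DeleteHostGroup carries no request body: the target resource is
+            # identified solely by the `name` path parameter above, which is
+            # why this base class defines no _get_request_body_json helper.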
return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = host_group.DeleteHostGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetAppRestTransport._BaseDeleteHostGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteKmsConfig: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1469,6 +1577,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetHostGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/hostGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = host_group.GetHostGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetAppRestTransport._BaseGetHostGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetKmsConfig: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1939,6 +2094,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListHostGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/hostGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = host_group.ListHostGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetAppRestTransport._BaseListHostGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" 
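+            # The `$alt` system parameter requests JSON responses with enum
+            # values encoded as integers, mirroring the
+            # use_integers_for_enums=True serialization used above.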
+ return query_params + class _BaseListKmsConfigs: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -2221,6 +2423,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseRestoreBackupFiles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/volumes/*}:restore", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = volume.RestoreBackupFilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetAppRestTransport._BaseRestoreBackupFiles._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseResumeReplication: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -2799,6 +3058,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateHostGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{host_group.name=projects/*/locations/*/hostGroups/*}", + "body": "host_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_host_group.UpdateHostGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetAppRestTransport._BaseUpdateHostGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateKmsConfig: def __hash__(self): # pragma: NO COVER return 
NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py index 2d4ba33269d8..2b2d623c3e6c 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py @@ -56,10 +56,21 @@ FlexPerformance, HybridReplicationSchedule, LocationMetadata, + OsType, QosType, ServiceLevel, + StoragePoolType, UserCommands, ) +from .host_group import ( + CreateHostGroupRequest, + DeleteHostGroupRequest, + GetHostGroupRequest, + HostGroup, + ListHostGroupsRequest, + ListHostGroupsResponse, + UpdateHostGroupRequest, +) from .kms import ( CreateKmsConfigRequest, DeleteKmsConfigRequest, @@ -121,6 +132,10 @@ from .volume import ( AccessType, BackupConfig, + BlockDevice, + CacheConfig, + CacheParameters, + CachePrePopulate, CreateVolumeRequest, DailySchedule, DeleteVolumeRequest, @@ -133,6 +148,8 @@ MonthlySchedule, MountOption, Protocols, + RestoreBackupFilesRequest, + RestoreBackupFilesResponse, RestoreParameters, RestrictedAction, RevertVolumeRequest, @@ -182,8 +199,17 @@ "EncryptionType", "FlexPerformance", "HybridReplicationSchedule", + "OsType", "QosType", "ServiceLevel", + "StoragePoolType", + "CreateHostGroupRequest", + "DeleteHostGroupRequest", + "GetHostGroupRequest", + "HostGroup", + "ListHostGroupsRequest", + "ListHostGroupsResponse", + "UpdateHostGroupRequest", "CreateKmsConfigRequest", "DeleteKmsConfigRequest", "EncryptVolumesRequest", @@ -233,6 +259,10 @@ "UpdateStoragePoolRequest", "ValidateDirectoryServiceRequest", "BackupConfig", + "BlockDevice", + "CacheConfig", + "CacheParameters", + "CachePrePopulate", "CreateVolumeRequest", "DailySchedule", "DeleteVolumeRequest", @@ -244,6 +274,8 @@ "ListVolumesResponse", "MonthlySchedule", "MountOption", + "RestoreBackupFilesRequest", + "RestoreBackupFilesResponse", "RestoreParameters", "RevertVolumeRequest", "SimpleExportPolicyRule", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_vault.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_vault.py index 234e78615ad2..f6d25d1da347 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_vault.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_vault.py @@ -70,7 +70,18 @@ class BackupVault(proto.Message): ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}`` backup_retention_policy (google.cloud.netapp_v1.types.BackupVault.BackupRetentionPolicy): Optional. Backup retention policy defining - the retenton of backups. + the retention of backups. + kms_config (str): + Optional. Specifies the Key Management System (KMS) + configuration to be used for backup encryption. Format: + ``projects/{project}/locations/{location}/kmsConfigs/{kms_config}`` + encryption_state (google.cloud.netapp_v1.types.BackupVault.EncryptionState): + Output only. Field indicating encryption + state of CMEK backups. + backups_crypto_key_version (str): + Output only. The crypto key version used to encrypt the + backup vault. Format: + ``projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`` """ class State(proto.Enum): @@ -112,6 +123,28 @@ class BackupVaultType(proto.Enum): IN_REGION = 1 CROSS_REGION = 2 + class EncryptionState(proto.Enum): + r"""Encryption state of customer-managed encryption keys (CMEK) + backups. 
+ + Values: + ENCRYPTION_STATE_UNSPECIFIED (0): + Encryption state not set. + ENCRYPTION_STATE_PENDING (1): + Encryption state is pending. + ENCRYPTION_STATE_COMPLETED (2): + Encryption is complete. + ENCRYPTION_STATE_IN_PROGRESS (3): + Encryption is in progress. + ENCRYPTION_STATE_FAILED (4): + Encryption has failed. + """ + ENCRYPTION_STATE_UNSPECIFIED = 0 + ENCRYPTION_STATE_PENDING = 1 + ENCRYPTION_STATE_COMPLETED = 2 + ENCRYPTION_STATE_IN_PROGRESS = 3 + ENCRYPTION_STATE_FAILED = 4 + class BackupRetentionPolicy(proto.Message): r"""Retention policy for backups in the backup vault @@ -211,6 +244,19 @@ class BackupRetentionPolicy(proto.Message): number=11, message=BackupRetentionPolicy, ) + kms_config: str = proto.Field( + proto.STRING, + number=12, + ) + encryption_state: EncryptionState = proto.Field( + proto.ENUM, + number=13, + enum=EncryptionState, + ) + backups_crypto_key_version: str = proto.Field( + proto.STRING, + number=14, + ) class GetBackupVaultRequest(proto.Message): diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py index 14fae34d3325..6d8c8762d4b2 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py @@ -26,8 +26,10 @@ "FlexPerformance", "EncryptionType", "DirectoryServiceType", + "StoragePoolType", "HybridReplicationSchedule", "QosType", + "OsType", "LocationMetadata", "UserCommands", }, @@ -104,6 +106,25 @@ class DirectoryServiceType(proto.Enum): ACTIVE_DIRECTORY = 1 +class StoragePoolType(proto.Enum): + r"""Type of storage pool + + Values: + STORAGE_POOL_TYPE_UNSPECIFIED (0): + Storage pool type is not specified. + FILE (1): + Storage pool type is file. + UNIFIED (2): + Storage pool type is unified. + UNIFIED_LARGE_CAPACITY (3): + Storage pool type is unified large capacity. + """ + STORAGE_POOL_TYPE_UNSPECIFIED = 0 + FILE = 1 + UNIFIED = 2 + UNIFIED_LARGE_CAPACITY = 3 + + class HybridReplicationSchedule(proto.Enum): r"""Schedule for Hybrid Replication. New enum values may be added in future to support different @@ -141,6 +162,25 @@ class QosType(proto.Enum): MANUAL = 2 +class OsType(proto.Enum): + r"""OS types for the host group + + Values: + OS_TYPE_UNSPECIFIED (0): + Unspecified OS Type + LINUX (1): + OS Type is Linux + WINDOWS (2): + OS Type is Windows + ESXI (3): + OS Type is VMware ESXi + """ + OS_TYPE_UNSPECIFIED = 0 + LINUX = 1 + WINDOWS = 2 + ESXI = 3 + + class LocationMetadata(proto.Message): r"""Metadata for a given [google.cloud.location.Location][google.cloud.location.Location]. @@ -155,6 +195,9 @@ class LocationMetadata(proto.Message): has_vcp (bool): Output only. Indicates if the location has VCP support. + has_ontap_proxy (bool): + Output only. Indicates if the location has + ONTAP Proxy support. 
""" supported_service_levels: MutableSequence["ServiceLevel"] = proto.RepeatedField( @@ -173,6 +216,10 @@ class LocationMetadata(proto.Message): proto.BOOL, number=3, ) + has_ontap_proxy: bool = proto.Field( + proto.BOOL, + number=4, + ) class UserCommands(proto.Message): diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/host_group.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/host_group.py new file mode 100644 index 000000000000..f8008f4c4aca --- /dev/null +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/host_group.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.netapp_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.netapp.v1", + manifest={ + "ListHostGroupsRequest", + "ListHostGroupsResponse", + "GetHostGroupRequest", + "CreateHostGroupRequest", + "UpdateHostGroupRequest", + "DeleteHostGroupRequest", + "HostGroup", + }, +) + + +class ListHostGroupsRequest(proto.Message): + r"""ListHostGroupsRequest for listing host groups. + + Attributes: + parent (str): + Required. Parent value for + ListHostGroupsRequest + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, the server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filter to apply to the request. + order_by (str): + Optional. Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListHostGroupsResponse(proto.Message): + r"""ListHostGroupsResponse is the response to a + ListHostGroupsRequest. + + Attributes: + host_groups (MutableSequence[google.cloud.netapp_v1.types.HostGroup]): + The list of host groups. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + host_groups: MutableSequence["HostGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="HostGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetHostGroupRequest(proto.Message): + r"""GetHostGroupRequest for getting a host group. + + Attributes: + name (str): + Required. 
The resource name of the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateHostGroupRequest(proto.Message): + r"""CreateHostGroupRequest for creating a host group. + + Attributes: + parent (str): + Required. Parent value for + CreateHostGroupRequest + host_group (google.cloud.netapp_v1.types.HostGroup): + Required. Fields of the host group to create. + host_group_id (str): + Required. ID of the host group to create. + Must be unique within the parent resource. Must + contain only letters, numbers, and hyphen, with + the first character a letter or underscore, the + last a letter or underscore or a number, and a + 63 character maximum. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + host_group: "HostGroup" = proto.Field( + proto.MESSAGE, + number=2, + message="HostGroup", + ) + host_group_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateHostGroupRequest(proto.Message): + r"""UpdateHostGroupRequest for updating a host group. + + Attributes: + host_group (google.cloud.netapp_v1.types.HostGroup): + Required. The host group to update. The host group's + ``name`` field is used to identify the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. + """ + + host_group: "HostGroup" = proto.Field( + proto.MESSAGE, + number=1, + message="HostGroup", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteHostGroupRequest(proto.Message): + r"""DeleteHostGroupRequest for deleting a single host group. + + Attributes: + name (str): + Required. The resource name of the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class HostGroup(proto.Message): + r"""Host group is a collection of hosts that can be used for + accessing a Block Volume. + + Attributes: + name (str): + Identifier. The resource name of the host group. Format: + ``projects/{project_number}/locations/{location_id}/hostGroups/{host_group_id}``. + type_ (google.cloud.netapp_v1.types.HostGroup.Type): + Required. Type of the host group. + state (google.cloud.netapp_v1.types.HostGroup.State): + Output only. State of the host group. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time of the host group. + hosts (MutableSequence[str]): + Required. The list of hosts associated with + the host group. + os_type (google.cloud.netapp_v1.types.OsType): + Required. The OS type of the host group. It + indicates the type of operating system used by + all of the hosts in the HostGroup. All hosts in + a HostGroup must be of the same OS type. This + can be set only when creating a HostGroup. + description (str): + Optional. Description of the host group. + labels (MutableMapping[str, str]): + Optional. Labels of the host group. + """ + + class Type(proto.Enum): + r"""Types of host group. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified type for host group. + ISCSI_INITIATOR (1): + iSCSI initiator host group. + """ + TYPE_UNSPECIFIED = 0 + ISCSI_INITIATOR = 1 + + class State(proto.Enum): + r"""Host group states. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state for host group. 
+ CREATING (1): + Host group is creating. + READY (2): + Host group is ready. + UPDATING (3): + Host group is updating. + DELETING (4): + Host group is deleting. + DISABLED (5): + Host group is disabled. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + UPDATING = 3 + DELETING = 4 + DISABLED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + hosts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + os_type: common.OsType = proto.Field( + proto.ENUM, + number=6, + enum=common.OsType, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py index 47f7bc6f7875..36185f6285be 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py @@ -261,16 +261,17 @@ class VerifyKmsConfigResponse(proto.Message): class KmsConfig(proto.Message): - r"""KmsConfig is the customer managed encryption key(CMEK) + r"""KmsConfig is the customer-managed encryption key(CMEK) configuration. Attributes: name (str): - Identifier. Name of the KmsConfig. + Identifier. Name of the KmsConfig. Format: + ``projects/{project}/locations/{location}/kmsConfigs/{kms_config}`` crypto_key_name (str): - Required. Customer managed crypto key resource full name. + Required. Customer-managed crypto key resource full name. Format: - projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{key}. + ``projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`` state (google.cloud.netapp_v1.types.KmsConfig.State): Output only. State of the KmsConfig. state_details (str): diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py index 15c8636fe0c7..cd5576f44efe 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py @@ -326,6 +326,15 @@ class StoragePool(proto.Message): hot_tier_size_used_gib (int): Output only. Total hot tier data rounded down to the nearest GiB used by the storage pool. + type_ (google.cloud.netapp_v1.types.StoragePoolType): + Optional. Type of the storage pool. This field is used to + control whether the pool supports ``FILE`` based volumes + only or ``UNIFIED`` (both ``FILE`` and ``BLOCK``) volumes or + ``UNIFIED_LARGE_CAPACITY`` (both ``FILE`` and ``BLOCK``) + volumes with large capacity. If not specified during + creation, it defaults to ``FILE``. + + This field is a member of `oneof`_ ``_type``. 
""" class State(proto.Enum): @@ -490,6 +499,12 @@ class State(proto.Enum): proto.INT64, number=34, ) + type_: common.StoragePoolType = proto.Field( + proto.ENUM, + number=35, + optional=True, + enum=common.StoragePoolType, + ) class ValidateDirectoryServiceRequest(proto.Message): diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py index 1c5b070bad5b..e59d08ae8123 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py @@ -51,6 +51,12 @@ "BackupConfig", "TieringPolicy", "HybridReplicationParameters", + "CacheParameters", + "CacheConfig", + "CachePrePopulate", + "BlockDevice", + "RestoreBackupFilesRequest", + "RestoreBackupFilesResponse", }, ) @@ -68,11 +74,14 @@ class Protocols(proto.Enum): NFS V4 protocol SMB (3): SMB protocol + ISCSI (4): + ISCSI protocol """ PROTOCOLS_UNSPECIFIED = 0 NFSV3 = 1 NFSV4 = 2 SMB = 3 + ISCSI = 4 class AccessType(proto.Enum): @@ -474,10 +483,16 @@ class Volume(proto.Message): for the volume. throughput_mibps (float): Optional. Throughput of the volume (in MiB/s) + cache_parameters (google.cloud.netapp_v1.types.CacheParameters): + Optional. Cache parameters for the volume. hot_tier_size_used_gib (int): Output only. Total hot tier data rounded down to the nearest GiB used by the Volume. This field is only used for flex Service Level + block_devices (MutableSequence[google.cloud.netapp_v1.types.BlockDevice]): + Optional. Block devices for the volume. + Currently, only one block device is permitted + per Volume. """ class State(proto.Enum): @@ -694,10 +709,20 @@ class State(proto.Enum): proto.DOUBLE, number=41, ) + cache_parameters: "CacheParameters" = proto.Field( + proto.MESSAGE, + number=42, + message="CacheParameters", + ) hot_tier_size_used_gib: int = proto.Field( proto.INT64, number=44, ) + block_devices: MutableSequence["BlockDevice"] = proto.RepeatedField( + proto.MESSAGE, + number=45, + message="BlockDevice", + ) class ExportPolicy(proto.Message): @@ -802,21 +827,21 @@ class SimpleExportPolicyRule(proto.Message): This field is a member of `oneof`_ ``_squash_mode``. anon_uid (int): Optional. An integer representing the anonymous user ID. - Range is 0 to 4294967295. Required when squash_mode is - ROOT_SQUASH or ALL_SQUASH. + Range is 0 to ``4294967295``. Required when ``squash_mode`` + is ``ROOT_SQUASH`` or ``ALL_SQUASH``. This field is a member of `oneof`_ ``_anon_uid``. """ class SquashMode(proto.Enum): - r"""SquashMode defines how remote user privileges are restricted - when accessing an NFS export. It controls how user identities - (like root) are mapped to anonymous users to limit access and - enforce security. + r"""``SquashMode`` defines how remote user privileges are restricted + when accessing an NFS export. It controls how user identities (like + root) are mapped to anonymous users to limit access and enforce + security. Values: SQUASH_MODE_UNSPECIFIED (0): - Defaults to NO_ROOT_SQUASH. + Defaults to ``NO_ROOT_SQUASH``. NO_ROOT_SQUASH (1): The root user (UID 0) retains full access. Other users are unaffected. @@ -1438,4 +1463,344 @@ class VolumeHybridReplicationType(proto.Enum): ) +class CacheParameters(proto.Message): + r"""Cache Parameters for the volume. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + peer_volume_name (str): + Required. 
Name of the origin volume for the
+            cache volume.
+        peer_cluster_name (str):
+            Required. Name of the origin volume's ONTAP
+            cluster.
+        peer_svm_name (str):
+            Required. Name of the origin volume's SVM.
+        peer_ip_addresses (MutableSequence[str]):
+            Required. List of IC LIF addresses of the
+            origin volume's ONTAP cluster.
+        enable_global_file_lock (bool):
+            Optional. Indicates whether the cache volume
+            has global file lock enabled.
+
+            This field is a member of `oneof`_ ``_enable_global_file_lock``.
+        cache_config (google.cloud.netapp_v1.types.CacheConfig):
+            Optional. Configuration of the cache volume.
+        cache_state (google.cloud.netapp_v1.types.CacheParameters.CacheState):
+            Output only. State of the cache volume
+            indicating the peering status.
+        command (str):
+            Output only. Copy-paste-able commands to be
+            used on user's ONTAP to accept peering requests.
+        peering_command_expiry_time (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. Expiration time for the peering
+            command to be executed on user's ONTAP.
+        passphrase (str):
+            Output only. Temporary passphrase generated
+            to accept cluster peering command.
+        state_details (str):
+            Output only. Detailed description of the
+            current cache state.
+    """
+
+    class CacheState(proto.Enum):
+        r"""State of the cache volume indicating the peering status.
+
+        Values:
+            CACHE_STATE_UNSPECIFIED (0):
+                Default unspecified state.
+            PENDING_CLUSTER_PEERING (1):
+                State indicating waiting for cluster peering
+                to be established.
+            PENDING_SVM_PEERING (2):
+                State indicating waiting for SVM peering to
+                be established.
+            PEERED (3):
+                State indicating successful establishment of
+                peering with origin volume's ONTAP cluster.
+            ERROR (4):
+                Terminal state wherein peering with origin
+                volume's ONTAP cluster has failed.
+        """
+        CACHE_STATE_UNSPECIFIED = 0
+        PENDING_CLUSTER_PEERING = 1
+        PENDING_SVM_PEERING = 2
+        PEERED = 3
+        ERROR = 4
+
+    peer_volume_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    peer_cluster_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    peer_svm_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    peer_ip_addresses: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=4,
+    )
+    enable_global_file_lock: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+        optional=True,
+    )
+    cache_config: "CacheConfig" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message="CacheConfig",
+    )
+    cache_state: CacheState = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=CacheState,
+    )
+    command: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    peering_command_expiry_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=timestamp_pb2.Timestamp,
+    )
+    passphrase: str = proto.Field(
+        proto.STRING,
+        number=10,
+    )
+    state_details: str = proto.Field(
+        proto.STRING,
+        number=12,
+    )
+
+
+class CacheConfig(proto.Message):
+    r"""Configuration of the cache volume.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        cache_pre_populate (google.cloud.netapp_v1.types.CachePrePopulate):
+            Optional. Pre-populate cache volume with data
+            from the origin volume.
+        writeback_enabled (bool):
+            Optional. Flag indicating whether writeback
+            is enabled for the FlexCache volume.
+
+            This field is a member of `oneof`_ ``_writeback_enabled``.
+        cifs_change_notify_enabled (bool):
+            Optional. Flag indicating whether a CIFS
+            change notification is enabled for the FlexCache
+            volume.
+ + This field is a member of `oneof`_ ``_cifs_change_notify_enabled``. + cache_pre_populate_state (google.cloud.netapp_v1.types.CacheConfig.CachePrePopulateState): + Output only. State of the prepopulation job + indicating how the prepopulation is progressing. + """ + + class CachePrePopulateState(proto.Enum): + r"""State of the prepopulation job indicating how the + prepopulation is progressing. + + Values: + CACHE_PRE_POPULATE_STATE_UNSPECIFIED (0): + Default unspecified state. + NOT_NEEDED (1): + State representing when the most recent + create or update request did not require a + prepopulation job. + IN_PROGRESS (2): + State representing when the most recent + update request requested a prepopulation job but + it has not yet completed. + COMPLETE (3): + State representing when the most recent + update request requested a prepopulation job and + it has completed successfully. + ERROR (4): + State representing when the most recent + update request requested a prepopulation job but + the prepopulate job failed. + """ + CACHE_PRE_POPULATE_STATE_UNSPECIFIED = 0 + NOT_NEEDED = 1 + IN_PROGRESS = 2 + COMPLETE = 3 + ERROR = 4 + + cache_pre_populate: "CachePrePopulate" = proto.Field( + proto.MESSAGE, + number=1, + message="CachePrePopulate", + ) + writeback_enabled: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + cifs_change_notify_enabled: bool = proto.Field( + proto.BOOL, + number=5, + optional=True, + ) + cache_pre_populate_state: CachePrePopulateState = proto.Field( + proto.ENUM, + number=6, + enum=CachePrePopulateState, + ) + + +class CachePrePopulate(proto.Message): + r"""Pre-populate cache volume with data from the origin volume. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + path_list (MutableSequence[str]): + Optional. List of directory-paths to be + pre-populated for the FlexCache volume. + exclude_path_list (MutableSequence[str]): + Optional. List of directory-paths to be + excluded for pre-population for the FlexCache + volume. + recursion (bool): + Optional. Flag indicating whether the directories listed + with the ``path_list`` need to be recursively pre-populated. + + This field is a member of `oneof`_ ``_recursion``. + """ + + path_list: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + exclude_path_list: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + recursion: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + + +class BlockDevice(proto.Message): + r"""Block device represents the device(s) which are stored in the + block volume. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optional. User-defined name for the block device, unique + within the volume. In case no user input is provided, name + will be auto-generated in the backend. The name must meet + the following requirements: + + - Be between 1 and 255 characters long. + - Contain only uppercase or lowercase letters (A-Z, a-z), + numbers (0-9), and the following special characters: "-", + "\_", "}", "{", ".". + - Spaces are not allowed. + + This field is a member of `oneof`_ ``_name``. + host_groups (MutableSequence[str]): + Optional. A list of host groups that identify hosts that can + mount the block volume. 
Format: + ``projects/{project_id}/locations/{location}/hostGroups/{host_group_id}`` + This field can be updated after the block device is created. + identifier (str): + Output only. Device identifier of the block volume. This + represents ``lun_serial_number`` for iSCSI volumes. + size_gib (int): + Optional. The size of the block device in GiB. Any value + provided for the ``size_gib`` field during volume creation + is ignored. The block device's size is system-managed and + will be set to match the parent Volume's ``capacity_gib``. + + This field is a member of `oneof`_ ``_size_gib``. + os_type (google.cloud.netapp_v1.types.OsType): + Required. Immutable. The OS type of the + volume. This field can't be changed after the + block device is created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + host_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + identifier: str = proto.Field( + proto.STRING, + number=3, + ) + size_gib: int = proto.Field( + proto.INT64, + number=4, + optional=True, + ) + os_type: common.OsType = proto.Field( + proto.ENUM, + number=5, + enum=common.OsType, + ) + + +class RestoreBackupFilesRequest(proto.Message): + r"""RestoreBackupFilesRequest restores files from a backup to a + volume. + + Attributes: + name (str): + Required. The volume resource name, in the format + ``projects/{project_id}/locations/{location}/volumes/{volume_id}`` + backup (str): + Required. The backup resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}`` + file_list (MutableSequence[str]): + Required. List of files to be restored, + specified by their absolute path in the source + volume. + restore_destination_path (str): + Optional. Absolute directory path in the destination volume. + This is required if the ``file_list`` is provided. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + backup: str = proto.Field( + proto.STRING, + number=2, + ) + file_list: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + restore_destination_path: str = proto.Field( + proto.STRING, + number=4, + ) + + +class RestoreBackupFilesResponse(proto.Message): + r"""RestoreBackupFilesResponse is the result of + RestoreBackupFilesRequest. + + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_host_group_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_host_group_async.py new file mode 100644 index 000000000000..91079f97b1e0 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_host_group_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateHostGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_create_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ["hosts_value1", "hosts_value2"] + host_group.os_type = "ESXI" + + request = netapp_v1.CreateHostGroupRequest( + parent="parent_value", + host_group=host_group, + host_group_id="host_group_id_value", + ) + + # Make the request + operation = client.create_host_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_CreateHostGroup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_host_group_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_host_group_sync.py new file mode 100644 index 000000000000..fdcf6852142b --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_host_group_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateHostGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_create_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ["hosts_value1", "hosts_value2"] + host_group.os_type = "ESXI" + + request = netapp_v1.CreateHostGroupRequest( + parent="parent_value", + host_group=host_group, + host_group_id="host_group_id_value", + ) + + # Make the request + operation = client.create_host_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_CreateHostGroup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_async.py index 8f8f2d6297c2..34f5a05bb2e2 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_async.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_async.py @@ -43,7 +43,7 @@ async def sample_create_volume(): volume.share_name = "share_name_value" volume.storage_pool = "storage_pool_value" volume.capacity_gib = 1247 - volume.protocols = ["SMB"] + volume.protocols = ["ISCSI"] request = netapp_v1.CreateVolumeRequest( parent="parent_value", diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_sync.py index b35cc4c2fe9d..5144bb200ff8 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_sync.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_volume_sync.py @@ -43,7 +43,7 @@ def sample_create_volume(): volume.share_name = "share_name_value" volume.storage_pool = "storage_pool_value" volume.capacity_gib = 1247 - volume.protocols = ["SMB"] + volume.protocols = ["ISCSI"] request = netapp_v1.CreateVolumeRequest( parent="parent_value", diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_host_group_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_host_group_async.py new file mode 100644 index 000000000000..577baaf83cda --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_host_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteHostGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_delete_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteHostGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_host_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_DeleteHostGroup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_host_group_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_host_group_sync.py new file mode 100644 index 000000000000..488046cefcb0 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_host_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteHostGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_delete_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteHostGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_host_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_DeleteHostGroup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_host_group_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_host_group_async.py new file mode 100644 index 000000000000..a6215c5cf19a --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_host_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetHostGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_get_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetHostGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_host_group(request=request) + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_GetHostGroup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_host_group_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_host_group_sync.py new file mode 100644 index 000000000000..feaba4d33769 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_host_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetHostGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_get_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetHostGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_host_group(request=request) + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_GetHostGroup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_host_groups_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_host_groups_async.py new file mode 100644 index 000000000000..27d22722f189 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_host_groups_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHostGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListHostGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_list_host_groups(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListHostGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_host_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END netapp_v1_generated_NetApp_ListHostGroups_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_host_groups_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_host_groups_sync.py new file mode 100644 index 000000000000..26c2915907a6 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_host_groups_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHostGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListHostGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_list_host_groups(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListHostGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_host_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END netapp_v1_generated_NetApp_ListHostGroups_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_restore_backup_files_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_restore_backup_files_async.py new file mode 100644 index 000000000000..74f95c5fb7f5 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_restore_backup_files_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackupFiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_RestoreBackupFiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_restore_backup_files(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.RestoreBackupFilesRequest( + name="name_value", + backup="backup_value", + file_list=["file_list_value1", "file_list_value2"], + ) + + # Make the request + operation = client.restore_backup_files(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_RestoreBackupFiles_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_restore_backup_files_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_restore_backup_files_sync.py new file mode 100644 index 000000000000..9bcb2dbc36e2 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_restore_backup_files_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackupFiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_RestoreBackupFiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_restore_backup_files(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.RestoreBackupFilesRequest( + name="name_value", + backup="backup_value", + file_list=["file_list_value1", "file_list_value2"], + ) + + # Make the request + operation = client.restore_backup_files(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_RestoreBackupFiles_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_host_group_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_host_group_async.py new file mode 100644 index 000000000000..3707ef054056 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_host_group_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateHostGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_update_host_group(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ["hosts_value1", "hosts_value2"] + host_group.os_type = "ESXI" + + request = netapp_v1.UpdateHostGroupRequest( + host_group=host_group, + ) + + # Make the request + operation = client.update_host_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_UpdateHostGroup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_host_group_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_host_group_sync.py new file mode 100644 index 000000000000..34d47999de69 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_host_group_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHostGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateHostGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_update_host_group(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + host_group = netapp_v1.HostGroup() + host_group.type_ = "ISCSI_INITIATOR" + host_group.hosts = ["hosts_value1", "hosts_value2"] + host_group.os_type = "ESXI" + + request = netapp_v1.UpdateHostGroupRequest( + host_group=host_group, + ) + + # Make the request + operation = client.update_host_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END netapp_v1_generated_NetApp_UpdateHostGroup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_async.py index 80b1c9eb3292..45a0bbd24c9c 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_async.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_async.py @@ -43,7 +43,7 @@ async def sample_update_volume(): volume.share_name = "share_name_value" volume.storage_pool = "storage_pool_value" volume.capacity_gib = 1247 - volume.protocols = ["SMB"] + volume.protocols = ["ISCSI"] request = netapp_v1.UpdateVolumeRequest( volume=volume, diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_sync.py index 16ea24ef275d..4a3c0e7b2d71 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_sync.py +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_volume_sync.py @@ -43,7 +43,7 @@ def sample_update_volume(): volume.share_name = "share_name_value" volume.storage_pool = "storage_pool_value" volume.capacity_gib = 1247 - volume.protocols = ["SMB"] + volume.protocols = ["ISCSI"] request = netapp_v1.UpdateVolumeRequest( volume=volume, diff --git a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json index 65bbaf1981e6..0390ad846224 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json +++ b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json @@ -719,6 +719,183 @@ ], "title": "netapp_v1_generated_net_app_create_backup_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_host_group", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateHostGroup", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "CreateHostGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateHostGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"host_group", + "type": "google.cloud.netapp_v1.types.HostGroup" + }, + { + "name": "host_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_host_group" + }, + "description": "Sample for CreateHostGroup", + "file": "netapp_v1_generated_net_app_create_host_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateHostGroup_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_host_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.create_host_group", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateHostGroup", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "CreateHostGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateHostGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "host_group", + "type": "google.cloud.netapp_v1.types.HostGroup" + }, + { + "name": "host_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_host_group" + }, + "description": "Sample for CreateHostGroup", + "file": "netapp_v1_generated_net_app_create_host_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateHostGroup_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_host_group_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2433,19 +2610,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_host_group", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.DeleteHostGroup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteKmsConfig" + "shortName": "DeleteHostGroup" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.netapp_v1.types.DeleteKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.DeleteHostGroupRequest" }, { "name": "name", @@ -2465,13 +2642,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_kms_config" + "shortName": "delete_host_group" }, - "description": "Sample for DeleteKmsConfig", - "file": "netapp_v1_generated_net_app_delete_kms_config_async.py", + "description": "Sample for DeleteHostGroup", + "file": "netapp_v1_generated_net_app_delete_host_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_async", + "regionTag": "netapp_v1_generated_NetApp_DeleteHostGroup_async", "segments": [ { "end": 55, @@ -2504,7 +2681,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_kms_config_async.py" + "title": "netapp_v1_generated_net_app_delete_host_group_async.py" }, { "canonical": true, @@ -2513,19 +2690,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_host_group", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.DeleteHostGroup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteKmsConfig" + "shortName": "DeleteHostGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.DeleteHostGroupRequest" }, { "name": "name", @@ -2545,13 +2722,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_kms_config" + "shortName": "delete_host_group" }, - "description": "Sample for DeleteKmsConfig", - "file": "netapp_v1_generated_net_app_delete_kms_config_sync.py", + "description": "Sample for DeleteHostGroup", + "file": "netapp_v1_generated_net_app_delete_host_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_sync", + "regionTag": "netapp_v1_generated_NetApp_DeleteHostGroup_sync", "segments": [ { "end": 55, @@ -2584,7 +2761,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_kms_config_sync.py" + "title": "netapp_v1_generated_net_app_delete_host_group_sync.py" }, { "canonical": true, @@ -2594,19 +2771,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_quota_rule", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteQuotaRule", + "fullName": "google.cloud.netapp.v1.NetApp.DeleteKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteQuotaRule" + "shortName": "DeleteKmsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteQuotaRuleRequest" + "type": "google.cloud.netapp_v1.types.DeleteKmsConfigRequest" }, { "name": "name", @@ -2626,13 +2803,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_quota_rule" + "shortName": "delete_kms_config" }, - "description": "Sample for DeleteQuotaRule", - "file": "netapp_v1_generated_net_app_delete_quota_rule_async.py", + "description": "Sample for 
DeleteKmsConfig", + "file": "netapp_v1_generated_net_app_delete_kms_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteQuotaRule_async", + "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_async", "segments": [ { "end": 55, @@ -2665,7 +2842,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_quota_rule_async.py" + "title": "netapp_v1_generated_net_app_delete_kms_config_async.py" }, { "canonical": true, @@ -2674,19 +2851,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_quota_rule", + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteQuotaRule", + "fullName": "google.cloud.netapp.v1.NetApp.DeleteKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteQuotaRule" + "shortName": "DeleteKmsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteQuotaRuleRequest" + "type": "google.cloud.netapp_v1.types.DeleteKmsConfigRequest" }, { "name": "name", @@ -2706,13 +2883,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_quota_rule" + "shortName": "delete_kms_config" }, - "description": "Sample for DeleteQuotaRule", - "file": "netapp_v1_generated_net_app_delete_quota_rule_sync.py", + "description": "Sample for DeleteKmsConfig", + "file": "netapp_v1_generated_net_app_delete_kms_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteQuotaRule_sync", + "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_sync", "segments": [ { "end": 55, @@ -2745,7 +2922,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_quota_rule_sync.py" + "title": "netapp_v1_generated_net_app_delete_kms_config_sync.py" }, { "canonical": true, @@ -2755,19 +2932,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_replication", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_quota_rule", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", + "fullName": "google.cloud.netapp.v1.NetApp.DeleteQuotaRule", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteReplication" + "shortName": "DeleteQuotaRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteReplicationRequest" + "type": "google.cloud.netapp_v1.types.DeleteQuotaRuleRequest" }, { "name": "name", @@ -2787,13 +2964,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_replication" + "shortName": "delete_quota_rule" }, - "description": "Sample for DeleteReplication", - "file": "netapp_v1_generated_net_app_delete_replication_async.py", + "description": "Sample for DeleteQuotaRule", + "file": "netapp_v1_generated_net_app_delete_quota_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_async", + "regionTag": "netapp_v1_generated_NetApp_DeleteQuotaRule_async", "segments": [ { "end": 55, @@ -2826,7 +3003,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_replication_async.py" + "title": 
"netapp_v1_generated_net_app_delete_quota_rule_async.py" }, { "canonical": true, @@ -2835,19 +3012,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_replication", + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_quota_rule", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", + "fullName": "google.cloud.netapp.v1.NetApp.DeleteQuotaRule", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteReplication" + "shortName": "DeleteQuotaRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteReplicationRequest" + "type": "google.cloud.netapp_v1.types.DeleteQuotaRuleRequest" }, { "name": "name", @@ -2867,13 +3044,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_replication" + "shortName": "delete_quota_rule" }, - "description": "Sample for DeleteReplication", - "file": "netapp_v1_generated_net_app_delete_replication_sync.py", + "description": "Sample for DeleteQuotaRule", + "file": "netapp_v1_generated_net_app_delete_quota_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_sync", + "regionTag": "netapp_v1_generated_NetApp_DeleteQuotaRule_sync", "segments": [ { "end": 55, @@ -2906,7 +3083,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_replication_sync.py" + "title": "netapp_v1_generated_net_app_delete_quota_rule_sync.py" }, { "canonical": true, @@ -2916,19 +3093,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_snapshot", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteSnapshot", + "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteSnapshot" + "shortName": "DeleteReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteSnapshotRequest" + "type": "google.cloud.netapp_v1.types.DeleteReplicationRequest" }, { "name": "name", @@ -2948,11 +3125,172 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_snapshot" + "shortName": "delete_replication" }, - "description": "Sample for DeleteSnapshot", - "file": "netapp_v1_generated_net_app_delete_snapshot_async.py", - "language": "PYTHON", + "description": "Sample for DeleteReplication", + "file": "netapp_v1_generated_net_app_delete_replication_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_replication_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": 
"google.cloud.netapp_v1.NetAppClient.delete_replication", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteReplicationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_replication" + }, + "description": "Sample for DeleteReplication", + "file": "netapp_v1_generated_net_app_delete_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_snapshot", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteSnapshot", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "netapp_v1_generated_net_app_delete_snapshot_async.py", + "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "netapp_v1_generated_NetApp_DeleteSnapshot_async", "segments": [ @@ -4349,19 +4687,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_host_group", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.GetHostGroup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetKmsConfig" + "shortName": "GetHostGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.GetHostGroupRequest" }, { "name": "name", @@ -4380,14 +4718,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.types.KmsConfig", - "shortName": "get_kms_config" + "resultType": "google.cloud.netapp_v1.types.HostGroup", + 
"shortName": "get_host_group" }, - "description": "Sample for GetKmsConfig", - "file": "netapp_v1_generated_net_app_get_kms_config_async.py", + "description": "Sample for GetHostGroup", + "file": "netapp_v1_generated_net_app_get_host_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_async", + "regionTag": "netapp_v1_generated_NetApp_GetHostGroup_async", "segments": [ { "end": 51, @@ -4420,7 +4758,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_kms_config_async.py" + "title": "netapp_v1_generated_net_app_get_host_group_async.py" }, { "canonical": true, @@ -4429,19 +4767,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_host_group", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.GetHostGroup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetKmsConfig" + "shortName": "GetHostGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.GetHostGroupRequest" }, { "name": "name", @@ -4460,14 +4798,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.types.KmsConfig", - "shortName": "get_kms_config" + "resultType": "google.cloud.netapp_v1.types.HostGroup", + "shortName": "get_host_group" }, - "description": "Sample for GetKmsConfig", - "file": "netapp_v1_generated_net_app_get_kms_config_sync.py", + "description": "Sample for GetHostGroup", + "file": "netapp_v1_generated_net_app_get_host_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_sync", + "regionTag": "netapp_v1_generated_NetApp_GetHostGroup_sync", "segments": [ { "end": 51, @@ -4500,7 +4838,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_kms_config_sync.py" + "title": "netapp_v1_generated_net_app_get_host_group_sync.py" }, { "canonical": true, @@ -4510,19 +4848,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_quota_rule", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetQuotaRule", + "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetQuotaRule" + "shortName": "GetKmsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetQuotaRuleRequest" + "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" }, { "name": "name", @@ -4541,14 +4879,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.types.QuotaRule", - "shortName": "get_quota_rule" + "resultType": "google.cloud.netapp_v1.types.KmsConfig", + "shortName": "get_kms_config" }, - "description": "Sample for GetQuotaRule", - "file": "netapp_v1_generated_net_app_get_quota_rule_async.py", + "description": "Sample for GetKmsConfig", + "file": "netapp_v1_generated_net_app_get_kms_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"netapp_v1_generated_NetApp_GetQuotaRule_async", + "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_async", "segments": [ { "end": 51, @@ -4581,7 +4919,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_quota_rule_async.py" + "title": "netapp_v1_generated_net_app_get_kms_config_async.py" }, { "canonical": true, @@ -4590,19 +4928,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_quota_rule", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetQuotaRule", + "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetQuotaRule" + "shortName": "GetKmsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetQuotaRuleRequest" + "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" }, { "name": "name", @@ -4621,14 +4959,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.types.QuotaRule", - "shortName": "get_quota_rule" + "resultType": "google.cloud.netapp_v1.types.KmsConfig", + "shortName": "get_kms_config" }, - "description": "Sample for GetQuotaRule", - "file": "netapp_v1_generated_net_app_get_quota_rule_sync.py", + "description": "Sample for GetKmsConfig", + "file": "netapp_v1_generated_net_app_get_kms_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetQuotaRule_sync", + "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_sync", "segments": [ { "end": 51, @@ -4661,7 +4999,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_quota_rule_sync.py" + "title": "netapp_v1_generated_net_app_get_kms_config_sync.py" }, { "canonical": true, @@ -4671,19 +5009,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_replication", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_quota_rule", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetReplication", + "fullName": "google.cloud.netapp.v1.NetApp.GetQuotaRule", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetReplication" + "shortName": "GetQuotaRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetReplicationRequest" + "type": "google.cloud.netapp_v1.types.GetQuotaRuleRequest" }, { "name": "name", @@ -4702,14 +5040,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.types.Replication", - "shortName": "get_replication" + "resultType": "google.cloud.netapp_v1.types.QuotaRule", + "shortName": "get_quota_rule" }, - "description": "Sample for GetReplication", - "file": "netapp_v1_generated_net_app_get_replication_async.py", + "description": "Sample for GetQuotaRule", + "file": "netapp_v1_generated_net_app_get_quota_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetReplication_async", + "regionTag": "netapp_v1_generated_NetApp_GetQuotaRule_async", "segments": [ { "end": 51, @@ -4742,7 +5080,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_replication_async.py" + "title": "netapp_v1_generated_net_app_get_quota_rule_async.py" }, { 
"canonical": true, @@ -4751,19 +5089,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_replication", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_quota_rule", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetReplication", + "fullName": "google.cloud.netapp.v1.NetApp.GetQuotaRule", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetReplication" + "shortName": "GetQuotaRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetReplicationRequest" + "type": "google.cloud.netapp_v1.types.GetQuotaRuleRequest" }, { "name": "name", @@ -4782,8 +5120,169 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.types.Replication", - "shortName": "get_replication" + "resultType": "google.cloud.netapp_v1.types.QuotaRule", + "shortName": "get_quota_rule" + }, + "description": "Sample for GetQuotaRule", + "file": "netapp_v1_generated_net_app_get_quota_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_GetQuotaRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_get_quota_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_replication", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.GetReplication", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "GetReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.GetReplicationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.netapp_v1.types.Replication", + "shortName": "get_replication" + }, + "description": "Sample for GetReplication", + "file": "netapp_v1_generated_net_app_get_replication_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_GetReplication_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_get_replication_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.get_replication", + "method": { + "fullName": 
"google.cloud.netapp.v1.NetApp.GetReplication", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "GetReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.GetReplicationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.netapp_v1.types.Replication", + "shortName": "get_replication" }, "description": "Sample for GetReplication", "file": "netapp_v1_generated_net_app_get_replication_sync.py", @@ -5951,6 +6450,167 @@ ], "title": "netapp_v1_generated_net_app_list_backups_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_host_groups", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.ListHostGroups", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "ListHostGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.ListHostGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListHostGroupsAsyncPager", + "shortName": "list_host_groups" + }, + "description": "Sample for ListHostGroups", + "file": "netapp_v1_generated_net_app_list_host_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_ListHostGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_list_host_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.list_host_groups", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.ListHostGroups", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "ListHostGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.ListHostGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListHostGroupsPager", + "shortName": "list_host_groups" + }, + "description": "Sample for ListHostGroups", + "file": "netapp_v1_generated_net_app_list_host_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "netapp_v1_generated_NetApp_ListHostGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_list_host_groups_sync.py" + }, { "canonical": true, "clientMethod": { @@ -6534,7 +7194,168 @@ "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListSnapshotsRequest" + "type": "google.cloud.netapp_v1.types.ListSnapshotsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListSnapshotsPager", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "netapp_v1_generated_net_app_list_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_ListSnapshots_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_list_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_storage_pools", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.ListStoragePools", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "ListStoragePools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsAsyncPager", + "shortName": "list_storage_pools" + }, + "description": "Sample for ListStoragePools", + "file": "netapp_v1_generated_net_app_list_storage_pools_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_list_storage_pools_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.list_storage_pools", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.ListStoragePools", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "ListStoragePools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" }, { "name": "parent", @@ -6553,14 +7374,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListSnapshotsPager", - "shortName": "list_snapshots" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsPager", + "shortName": "list_storage_pools" }, - "description": "Sample for ListSnapshots", - "file": "netapp_v1_generated_net_app_list_snapshots_sync.py", + "description": "Sample for ListStoragePools", + "file": "netapp_v1_generated_net_app_list_storage_pools_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListSnapshots_sync", + "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_sync", "segments": [ { "end": 52, @@ -6593,7 +7414,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_snapshots_sync.py" + "title": "netapp_v1_generated_net_app_list_storage_pools_sync.py" }, { "canonical": true, @@ -6603,19 +7424,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_storage_pools", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_volumes", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListStoragePools", + "fullName": "google.cloud.netapp.v1.NetApp.ListVolumes", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListStoragePools" + "shortName": "ListVolumes" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" + "type": "google.cloud.netapp_v1.types.ListVolumesRequest" }, { "name": "parent", @@ -6634,14 +7455,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsAsyncPager", - "shortName": "list_storage_pools" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesAsyncPager", + "shortName": "list_volumes" }, - "description": "Sample for ListStoragePools", - "file": "netapp_v1_generated_net_app_list_storage_pools_async.py", + "description": "Sample for ListVolumes", + "file": "netapp_v1_generated_net_app_list_volumes_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_async", + "regionTag": "netapp_v1_generated_NetApp_ListVolumes_async", "segments": [ { "end": 52, @@ -6674,7 +7495,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_storage_pools_async.py" + "title": "netapp_v1_generated_net_app_list_volumes_async.py" }, { "canonical": true, @@ -6683,19 +7504,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.list_storage_pools", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_volumes", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListStoragePools", + "fullName": 
"google.cloud.netapp.v1.NetApp.ListVolumes", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListStoragePools" + "shortName": "ListVolumes" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" + "type": "google.cloud.netapp_v1.types.ListVolumesRequest" }, { "name": "parent", @@ -6714,14 +7535,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsPager", - "shortName": "list_storage_pools" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesPager", + "shortName": "list_volumes" }, - "description": "Sample for ListStoragePools", - "file": "netapp_v1_generated_net_app_list_storage_pools_sync.py", + "description": "Sample for ListVolumes", + "file": "netapp_v1_generated_net_app_list_volumes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_sync", + "regionTag": "netapp_v1_generated_NetApp_ListVolumes_sync", "segments": [ { "end": 52, @@ -6754,7 +7575,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_storage_pools_sync.py" + "title": "netapp_v1_generated_net_app_list_volumes_sync.py" }, { "canonical": true, @@ -6764,23 +7585,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_volumes", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.restore_backup_files", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListVolumes", + "fullName": "google.cloud.netapp.v1.NetApp.RestoreBackupFiles", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListVolumes" + "shortName": "RestoreBackupFiles" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListVolumesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.RestoreBackupFilesRequest" }, { "name": "retry", @@ -6795,22 +7612,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesAsyncPager", - "shortName": "list_volumes" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_backup_files" }, - "description": "Sample for ListVolumes", - "file": "netapp_v1_generated_net_app_list_volumes_async.py", + "description": "Sample for RestoreBackupFiles", + "file": "netapp_v1_generated_net_app_restore_backup_files_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListVolumes_async", + "regionTag": "netapp_v1_generated_NetApp_RestoreBackupFiles_async", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -6820,22 +7637,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_volumes_async.py" + "title": "netapp_v1_generated_net_app_restore_backup_files_async.py" }, { "canonical": true, @@ -6844,23 +7661,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - 
"fullName": "google.cloud.netapp_v1.NetAppClient.list_volumes", + "fullName": "google.cloud.netapp_v1.NetAppClient.restore_backup_files", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListVolumes", + "fullName": "google.cloud.netapp.v1.NetApp.RestoreBackupFiles", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListVolumes" + "shortName": "RestoreBackupFiles" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListVolumesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.RestoreBackupFilesRequest" }, { "name": "retry", @@ -6875,22 +7688,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesPager", - "shortName": "list_volumes" + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_backup_files" }, - "description": "Sample for ListVolumes", - "file": "netapp_v1_generated_net_app_list_volumes_sync.py", + "description": "Sample for RestoreBackupFiles", + "file": "netapp_v1_generated_net_app_restore_backup_files_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListVolumes_sync", + "regionTag": "netapp_v1_generated_NetApp_RestoreBackupFiles_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -6900,22 +7713,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_volumes_sync.py" + "title": "netapp_v1_generated_net_app_restore_backup_files_sync.py" }, { "canonical": true, @@ -8511,6 +9324,175 @@ ], "title": "netapp_v1_generated_net_app_update_backup_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.update_host_group", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.UpdateHostGroup", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "UpdateHostGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.UpdateHostGroupRequest" + }, + { + "name": "host_group", + "type": "google.cloud.netapp_v1.types.HostGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_host_group" + }, + "description": "Sample for UpdateHostGroup", + "file": "netapp_v1_generated_net_app_update_host_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_UpdateHostGroup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_update_host_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.update_host_group", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.UpdateHostGroup", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "UpdateHostGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.UpdateHostGroupRequest" + }, + { + "name": "host_group", + "type": "google.cloud.netapp_v1.types.HostGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_host_group" + }, + "description": "Sample for UpdateHostGroup", + "file": "netapp_v1_generated_net_app_update_host_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_UpdateHostGroup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_update_host_group_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index ebb235af0b3d..6265ab7c8f30 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -81,7 +81,10 @@ from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy from google.cloud.netapp_v1.types import backup_vault from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault -from google.cloud.netapp_v1.types import cloud_netapp_service, common, kms +from google.cloud.netapp_v1.types import cloud_netapp_service, common +from google.cloud.netapp_v1.types import host_group +from google.cloud.netapp_v1.types import host_group as gcn_host_group +from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import quota_rule from google.cloud.netapp_v1.types import quota_rule as gcn_quota_rule from google.cloud.netapp_v1.types import replication @@ -2242,6 +2245,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): available_throughput_mibps=0.2772, cold_tier_size_used_gib=2416, hot_tier_size_used_gib=2329, + type_=common.StoragePoolType.FILE, ) response = client.get_storage_pool(request) @@ -2282,6 +2286,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): assert math.isclose(response.available_throughput_mibps, 0.2772, rel_tol=1e-6) assert response.cold_tier_size_used_gib 
== 2416 assert response.hot_tier_size_used_gib == 2329 + assert response.type_ == common.StoragePoolType.FILE def test_get_storage_pool_non_empty_request_with_auto_populated_field(): @@ -2438,6 +2443,7 @@ async def test_get_storage_pool_async( available_throughput_mibps=0.2772, cold_tier_size_used_gib=2416, hot_tier_size_used_gib=2329, + type_=common.StoragePoolType.FILE, ) ) response = await client.get_storage_pool(request) @@ -2479,6 +2485,7 @@ async def test_get_storage_pool_async( assert math.isclose(response.available_throughput_mibps, 0.2772, rel_tol=1e-6) assert response.cold_tier_size_used_gib == 2416 assert response.hot_tier_size_used_gib == 2329 + assert response.type_ == common.StoragePoolType.FILE @pytest.mark.asyncio @@ -16085,6 +16092,9 @@ def test_get_backup_vault(request_type, transport: str = "grpc"): backup_region="backup_region_value", source_backup_vault="source_backup_vault_value", destination_backup_vault="destination_backup_vault_value", + kms_config="kms_config_value", + encryption_state=backup_vault.BackupVault.EncryptionState.ENCRYPTION_STATE_PENDING, + backups_crypto_key_version="backups_crypto_key_version_value", ) response = client.get_backup_vault(request) @@ -16106,6 +16116,12 @@ def test_get_backup_vault(request_type, transport: str = "grpc"): assert response.backup_region == "backup_region_value" assert response.source_backup_vault == "source_backup_vault_value" assert response.destination_backup_vault == "destination_backup_vault_value" + assert response.kms_config == "kms_config_value" + assert ( + response.encryption_state + == backup_vault.BackupVault.EncryptionState.ENCRYPTION_STATE_PENDING + ) + assert response.backups_crypto_key_version == "backups_crypto_key_version_value" def test_get_backup_vault_non_empty_request_with_auto_populated_field(): @@ -16241,6 +16257,9 @@ async def test_get_backup_vault_async( backup_region="backup_region_value", source_backup_vault="source_backup_vault_value", destination_backup_vault="destination_backup_vault_value", + kms_config="kms_config_value", + encryption_state=backup_vault.BackupVault.EncryptionState.ENCRYPTION_STATE_PENDING, + backups_crypto_key_version="backups_crypto_key_version_value", ) ) response = await client.get_backup_vault(request) @@ -16263,6 +16282,12 @@ async def test_get_backup_vault_async( assert response.backup_region == "backup_region_value" assert response.source_backup_vault == "source_backup_vault_value" assert response.destination_backup_vault == "destination_backup_vault_value" + assert response.kms_config == "kms_config_value" + assert ( + response.encryption_state + == backup_vault.BackupVault.EncryptionState.ENCRYPTION_STATE_PENDING + ) + assert response.backups_crypto_key_version == "backups_crypto_key_version_value" @pytest.mark.asyncio @@ -23480,13 +23505,82 @@ async def test_delete_quota_rule_flattened_error_async(): ) -def test_list_storage_pools_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + volume.RestoreBackupFilesRequest, + dict, + ], +) +def test_restore_backup_files(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
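+ # restore_backup_files is a long-running operation: the stub returns a bare operations_pb2.Operation, and the client is expected to wrap it in an operation future (checked by the isinstance assertion below).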
+ with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_backup_files(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = volume.RestoreBackupFilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_backup_files_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = volume.RestoreBackupFilesRequest( + name="name_value", + backup="backup_value", + restore_destination_path="restore_destination_path_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.restore_backup_files(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == volume.RestoreBackupFilesRequest( + name="name_value", + backup="backup_value", + restore_destination_path="restore_destination_path_value", + ) + + +def test_restore_backup_files_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -23495,7 +23589,7 @@ def test_list_storage_pools_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_storage_pools in client._transport._wrapped_methods + client._transport.restore_backup_files in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -23504,253 +23598,255 @@ def test_list_storage_pools_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_storage_pools + client._transport.restore_backup_files ] = mock_rpc - request = {} - client.list_storage_pools(request) + request = {} + client.restore_backup_files(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_storage_pools(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_storage_pools_rest_required_fields( - request_type=storage_pool.ListStoragePoolsRequest, +@pytest.mark.asyncio +async def test_restore_backup_files_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.NetAppRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_storage_pools._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.restore_backup_files + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.restore_backup_files + ] = mock_rpc - jsonified_request["parent"] = "parent_value" + request = {} + await client.restore_backup_files(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_storage_pools._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.restore_backup_files(request) - # Designate an appropriate value for the returned response. - return_value = storage_pool.ListStoragePoolsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_restore_backup_files_async( + transport: str = "grpc_asyncio", request_type=volume.RestoreBackupFilesRequest +): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.list_storage_pools(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup_files(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = volume.RestoreBackupFilesRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_storage_pools_rest_unset_required_fields(): - transport = transports.NetAppRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.list_storage_pools._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) +@pytest.mark.asyncio +async def test_restore_backup_files_async_from_dict(): + await test_restore_backup_files_async(request_type=dict) -def test_list_storage_pools_rest_flattened(): +def test_restore_backup_files_field_headers(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = storage_pool.ListStoragePoolsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
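+ # The routing check below verifies that request.name is forwarded in the x-goog-request-params metadata header.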
+ request = volume.RestoreBackupFilesRequest() - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) + request.name = "name_value" - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_backup_files(request) - client.list_storage_pools(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/storagePools" - % client.transport._host, - args[1], - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_storage_pools_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_restore_backup_files_field_headers_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_storage_pools( - storage_pool.ListStoragePoolsRequest(), - parent="parent_value", + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume.RestoreBackupFilesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.restore_backup_files(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_storage_pools_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + host_group.ListHostGroupsRequest, + dict, + ], +) +def test_list_host_groups(request_type, transport: str = "grpc"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - storage_pool.ListStoragePoolsResponse( - storage_pools=[ - storage_pool.StoragePool(), - storage_pool.StoragePool(), - storage_pool.StoragePool(), - ], - next_page_token="abc", - ), - storage_pool.ListStoragePoolsResponse( - storage_pools=[], - next_page_token="def", - ), - storage_pool.ListStoragePoolsResponse( - storage_pools=[ - storage_pool.StoragePool(), - ], - next_page_token="ghi", - ), - storage_pool.ListStoragePoolsResponse( - storage_pools=[ - storage_pool.StoragePool(), - storage_pool.StoragePool(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple( - storage_pool.ListStoragePoolsResponse.to_json(x) for x in response + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = host_group.ListHostGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + response = client.list_host_groups(request) - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = host_group.ListHostGroupsRequest() + assert args[0] == request - pager = client.list_storage_pools(request=sample_request) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHostGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, storage_pool.StoragePool) for i in results) - pages = list(client.list_storage_pools(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_list_host_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = host_group.ListHostGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) -def test_create_storage_pool_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_host_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == host_group.ListHostGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_host_groups_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -23758,9 +23854,7 @@ def test_create_storage_pool_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_storage_pool in client._transport._wrapped_methods - ) + assert client._transport.list_host_groups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -23768,85 +23862,3095 @@ def test_create_storage_pool_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_storage_pool + client._transport.list_host_groups ] = mock_rpc - request = {} - client.create_storage_pool(request) + request = {} + client.list_host_groups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + client.list_host_groups(request) + + # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_host_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_host_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_host_groups + ] = mock_rpc + + request = {} + await client.list_host_groups(request) + + # Establish that the underlying gRPC stub method was called.
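+ # The first awaited call must reach the mocked wrapper exactly once; the wrapped RPC itself was built when the client was constructed.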
+ assert mock_rpc.call_count == 1 + + await client.list_host_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_storage_pool_rest_required_fields( - request_type=gcn_storage_pool.CreateStoragePoolRequest, +@pytest.mark.asyncio +async def test_list_host_groups_async( + transport: str = "grpc_asyncio", request_type=host_group.ListHostGroupsRequest ): - transport_class = transports.NetAppRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["storage_pool_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped - assert "storagePoolId" not in jsonified_request + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_storage_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.ListHostGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_host_groups(request) - # verify required fields with default values are now present - assert "storagePoolId" in jsonified_request - assert jsonified_request["storagePoolId"] == request_init["storage_pool_id"] + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = host_group.ListHostGroupsRequest() + assert args[0] == request - jsonified_request["parent"] = "parent_value" - jsonified_request["storagePoolId"] = "storage_pool_id_value" + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHostGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_storage_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("storage_pool_id",)) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "storagePoolId" in jsonified_request - assert jsonified_request["storagePoolId"] == "storage_pool_id_value" +@pytest.mark.asyncio +async def test_list_host_groups_async_from_dict(): + await test_list_host_groups_async(request_type=dict) + +def test_list_host_groups_field_headers(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = host_group.ListHostGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + call.return_value = host_group.ListHostGroupsResponse() + client.list_host_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_host_groups_field_headers_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = host_group.ListHostGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.ListHostGroupsResponse() + ) + await client.list_host_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_host_groups_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = host_group.ListHostGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_host_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_host_groups_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
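+ # Mixing a populated request object with flattened keyword arguments must raise ValueError.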
+ with pytest.raises(ValueError): + client.list_host_groups( + host_group.ListHostGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_host_groups_flattened_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = host_group.ListHostGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.ListHostGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_host_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_host_groups_flattened_error_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_host_groups( + host_group.ListHostGroupsRequest(), + parent="parent_value", + ) + + +def test_list_host_groups_pager(transport_name: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + host_group.HostGroup(), + ], + next_page_token="abc", + ), + host_group.ListHostGroupsResponse( + host_groups=[], + next_page_token="def", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + ], + next_page_token="ghi", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_host_groups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, host_group.HostGroup) for i in results) + + +def test_list_host_groups_pages(transport_name: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + # Set the response to a series of pages. 
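+ # The trailing RuntimeError in the side_effect guards against the pager requesting a page beyond the final response.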
+ call.side_effect = ( + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + host_group.HostGroup(), + ], + next_page_token="abc", + ), + host_group.ListHostGroupsResponse( + host_groups=[], + next_page_token="def", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + ], + next_page_token="ghi", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_host_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_host_groups_async_pager(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_host_groups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + host_group.HostGroup(), + ], + next_page_token="abc", + ), + host_group.ListHostGroupsResponse( + host_groups=[], + next_page_token="def", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + ], + next_page_token="ghi", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_host_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, host_group.HostGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_host_groups_async_pages(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_host_groups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
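+ # Each page token ("abc", "def", "ghi", then empty) is compared against raw_page.next_page_token below.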
+ call.side_effect = ( + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + host_group.HostGroup(), + ], + next_page_token="abc", + ), + host_group.ListHostGroupsResponse( + host_groups=[], + next_page_token="def", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + ], + next_page_token="ghi", + ), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_host_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + host_group.GetHostGroupRequest, + dict, + ], +) +def test_get_host_group(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = host_group.HostGroup( + name="name_value", + type_=host_group.HostGroup.Type.ISCSI_INITIATOR, + state=host_group.HostGroup.State.CREATING, + hosts=["hosts_value"], + os_type=common.OsType.LINUX, + description="description_value", + ) + response = client.get_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = host_group.GetHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, host_group.HostGroup) + assert response.name == "name_value" + assert response.type_ == host_group.HostGroup.Type.ISCSI_INITIATOR + assert response.state == host_group.HostGroup.State.CREATING + assert response.hosts == ["hosts_value"] + assert response.os_type == common.OsType.LINUX + assert response.description == "description_value" + + +def test_get_host_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = host_group.GetHostGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_host_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == host_group.GetHostGroupRequest( + name="name_value", + ) + + +def test_get_host_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_host_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_host_group] = mock_rpc + request = {} + client.get_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_host_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_host_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_host_group + ] = mock_rpc + + request = {} + await client.get_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_host_group_async( + transport: str = "grpc_asyncio", request_type=host_group.GetHostGroupRequest +): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + # Designate an appropriate return value for the call. 
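+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the message so the
+        # mocked stub hands back an awaitable, mirroring a real async
+        # unary-unary gRPC invocation.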
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.HostGroup( + name="name_value", + type_=host_group.HostGroup.Type.ISCSI_INITIATOR, + state=host_group.HostGroup.State.CREATING, + hosts=["hosts_value"], + os_type=common.OsType.LINUX, + description="description_value", + ) + ) + response = await client.get_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = host_group.GetHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, host_group.HostGroup) + assert response.name == "name_value" + assert response.type_ == host_group.HostGroup.Type.ISCSI_INITIATOR + assert response.state == host_group.HostGroup.State.CREATING + assert response.hosts == ["hosts_value"] + assert response.os_type == common.OsType.LINUX + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_host_group_async_from_dict(): + await test_get_host_group_async(request_type=dict) + + +def test_get_host_group_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = host_group.GetHostGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + call.return_value = host_group.HostGroup() + client.get_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_host_group_field_headers_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = host_group.GetHostGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.HostGroup() + ) + await client.get_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_host_group_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = host_group.HostGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
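+        # The client is expected to copy each keyword argument into a
+        # GetHostGroupRequest before invoking the stub; the assertions below
+        # verify that copy field by field.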
+ client.get_host_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_host_group_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_host_group( + host_group.GetHostGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_host_group_flattened_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = host_group.HostGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.HostGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_host_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_host_group_flattened_error_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_host_group( + host_group.GetHostGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_host_group.CreateHostGroupRequest, + dict, + ], +) +def test_create_host_group(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_host_group.CreateHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_host_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
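+    # Auto-population applies only to unset UUID4-eligible fields, so the
+    # plain string fields are pre-filled here and any such field is left
+    # empty for the client to fill in.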
+ client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_host_group.CreateHostGroupRequest( + parent="parent_value", + host_group_id="host_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_host_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_host_group.CreateHostGroupRequest( + parent="parent_value", + host_group_id="host_group_id_value", + ) + + +def test_create_host_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_host_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_host_group + ] = mock_rpc + request = {} + client.create_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_host_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_host_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_host_group + ] = mock_rpc + + request = {} + await client.create_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_host_group_async( + transport: str = "grpc_asyncio", request_type=gcn_host_group.CreateHostGroupRequest +): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_host_group.CreateHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_host_group_async_from_dict(): + await test_create_host_group_async(request_type=dict) + + +def test_create_host_group_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_host_group.CreateHostGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_host_group_field_headers_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_host_group.CreateHostGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
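+    # The routing header travels in the call's keyword metadata as an
+    # ("x-goog-request-params", "parent=...") pair, which the backend uses
+    # to route the request.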
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_host_group_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_host_group( + parent="parent_value", + host_group=gcn_host_group.HostGroup(name="name_value"), + host_group_id="host_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].host_group + mock_val = gcn_host_group.HostGroup(name="name_value") + assert arg == mock_val + arg = args[0].host_group_id + mock_val = "host_group_id_value" + assert arg == mock_val + + +def test_create_host_group_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_host_group( + gcn_host_group.CreateHostGroupRequest(), + parent="parent_value", + host_group=gcn_host_group.HostGroup(name="name_value"), + host_group_id="host_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_host_group_flattened_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_host_group( + parent="parent_value", + host_group=gcn_host_group.HostGroup(name="name_value"), + host_group_id="host_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].host_group + mock_val = gcn_host_group.HostGroup(name="name_value") + assert arg == mock_val + arg = args[0].host_group_id + mock_val = "host_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_host_group_flattened_error_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
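+    # Supplying both would leave it ambiguous which value should win, so the
+    # client raises ValueError rather than merging them.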
+ with pytest.raises(ValueError): + await client.create_host_group( + gcn_host_group.CreateHostGroupRequest(), + parent="parent_value", + host_group=gcn_host_group.HostGroup(name="name_value"), + host_group_id="host_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_host_group.UpdateHostGroupRequest, + dict, + ], +) +def test_update_host_group(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_host_group.UpdateHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_host_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_host_group.UpdateHostGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_host_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_host_group.UpdateHostGroupRequest() + + +def test_update_host_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_host_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_host_group + ] = mock_rpc + request = {} + client.update_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_host_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_host_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_host_group + ] = mock_rpc + + request = {} + await client.update_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_host_group_async( + transport: str = "grpc_asyncio", request_type=gcn_host_group.UpdateHostGroupRequest +): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_host_group.UpdateHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_host_group_async_from_dict(): + await test_update_host_group_async(request_type=dict) + + +def test_update_host_group_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_host_group.UpdateHostGroupRequest() + + request.host_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
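+    # For Update RPCs the routing key is the nested resource name, so the
+    # expected header below is "host_group.name=name_value" rather than a
+    # top-level request field.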
+ with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "host_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_host_group_field_headers_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_host_group.UpdateHostGroupRequest() + + request.host_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "host_group.name=name_value", + ) in kw["metadata"] + + +def test_update_host_group_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_host_group( + host_group=gcn_host_group.HostGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].host_group + mock_val = gcn_host_group.HostGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_host_group_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_host_group( + gcn_host_group.UpdateHostGroupRequest(), + host_group=gcn_host_group.HostGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_host_group_flattened_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
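+        # update_host_group is a long-running operation, so the faked stub
+        # returns an operations_pb2.Operation; the FakeUnaryUnaryCall
+        # wrapper assigned below is what the async client actually awaits.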
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_host_group( + host_group=gcn_host_group.HostGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].host_group + mock_val = gcn_host_group.HostGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_host_group_flattened_error_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_host_group( + gcn_host_group.UpdateHostGroupRequest(), + host_group=gcn_host_group.HostGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + host_group.DeleteHostGroupRequest, + dict, + ], +) +def test_delete_host_group(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = host_group.DeleteHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_host_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = host_group.DeleteHostGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_host_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == host_group.DeleteHostGroupRequest( + name="name_value", + ) + + +def test_delete_host_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_host_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_host_group + ] = mock_rpc + request = {} + client.delete_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_host_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_host_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_host_group + ] = mock_rpc + + request = {} + await client.delete_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_host_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_host_group_async( + transport: str = "grpc_asyncio", request_type=host_group.DeleteHostGroupRequest +): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = host_group.DeleteHostGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_host_group_async_from_dict(): + await test_delete_host_group_async(request_type=dict) + + +def test_delete_host_group_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = host_group.DeleteHostGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_host_group_field_headers_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = host_group.DeleteHostGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_host_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_host_group_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_host_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
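+        # mock_calls[0] unpacks to (name, args, kwargs); args[0] is the
+        # DeleteHostGroupRequest the client assembled from the keyword
+        # argument.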
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_host_group_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_host_group( + host_group.DeleteHostGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_host_group_flattened_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_host_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_host_group_flattened_error_async(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_host_group( + host_group.DeleteHostGroupRequest(), + name="name_value", + ) + + +def test_list_storage_pools_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_storage_pools in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_storage_pools + ] = mock_rpc + + request = {} + client.list_storage_pools(request) + + # Establish that the underlying gRPC stub method was called. 
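+        # list_storage_pools is not an operation method, so no cached
+        # operations_client is built on the first call and wrapper_fn needs
+        # no reset before the second invocation below.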
+ assert mock_rpc.call_count == 1 + + client.list_storage_pools(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_storage_pools_rest_required_fields( + request_type=storage_pool.ListStoragePoolsRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_storage_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_storage_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_pool.ListStoragePoolsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
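+            # path_template.transcode normally maps the request onto the
+            # method's google.api.http URI and body; faking it with the bare
+            # "v1/sample_method" URI below routes every field into
+            # query_params, where the required-field defaults are asserted.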
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_storage_pools(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_storage_pools_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_storage_pools._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_storage_pools_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storage_pool.ListStoragePoolsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_storage_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/storagePools" + % client.transport._host, + args[1], + ) + + +def test_list_storage_pools_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_storage_pools( + storage_pool.ListStoragePoolsRequest(), + parent="parent_value", + ) + + +def test_list_storage_pools_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + storage_pool.ListStoragePoolsResponse( + storage_pools=[ + storage_pool.StoragePool(), + storage_pool.StoragePool(), + storage_pool.StoragePool(), + ], + next_page_token="abc", + ), + storage_pool.ListStoragePoolsResponse( + storage_pools=[], + next_page_token="def", + ), + storage_pool.ListStoragePoolsResponse( + storage_pools=[ + storage_pool.StoragePool(), + ], + next_page_token="ghi", + ), + storage_pool.ListStoragePoolsResponse( + storage_pools=[ + storage_pool.StoragePool(), + storage_pool.StoragePool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + storage_pool.ListStoragePoolsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_storage_pools(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, storage_pool.StoragePool) for i in results) + + pages = list(client.list_storage_pools(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_storage_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_storage_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_storage_pool + ] = mock_rpc + + request = {} + client.create_storage_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_storage_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_storage_pool_rest_required_fields( + request_type=gcn_storage_pool.CreateStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["storage_pool_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "storagePoolId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "storagePoolId" in jsonified_request + assert jsonified_request["storagePoolId"] == request_init["storage_pool_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["storagePoolId"] = "storage_pool_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_storage_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("storage_pool_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "storagePoolId" in jsonified_request + assert jsonified_request["storagePoolId"] == "storage_pool_id_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
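+            # Unlike the GET case above, this POST fake also carries the
+            # request as the transcoded body, and the required storagePoolId
+            # surfaces as an empty query parameter in expected_params below.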
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_storage_pool(request) + + expected_params = [ + ( + "storagePoolId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("storagePoolId",)) + & set( + ( + "parent", + "storagePoolId", + "storagePool", + ) + ) + ) + + +def test_create_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + storage_pool_id="storage_pool_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/storagePools" + % client.transport._host, + args[1], + ) + + +def test_create_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
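+ # Note: a caller picks exactly one style, e.g.
+ #   client.create_storage_pool(request=gcn_storage_pool.CreateStoragePoolRequest(...))
+ # or the flattened form
+ #   client.create_storage_pool(parent=..., storage_pool=..., storage_pool_id=...);
+ # passing both at once is the ValueError exercised below.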
+ with pytest.raises(ValueError): + client.create_storage_pool( + gcn_storage_pool.CreateStoragePoolRequest(), + parent="parent_value", + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + storage_pool_id="storage_pool_id_value", + ) + + +def test_get_storage_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_storage_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_storage_pool + ] = mock_rpc + + request = {} + client.get_storage_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_storage_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_storage_pool_rest_required_fields( + request_type=storage_pool.GetStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_pool.StoragePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
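+ # Note: GetStoragePool maps to HTTP GET, so the transcode result below
+ # carries no "body" key and every request field must surface as a path or
+ # query parameter.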
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_pool.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_storage_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storage_pool.StoragePool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_pool.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/storagePools/*}" + % client.transport._host, + args[1], + ) + + +def test_get_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_storage_pool( + storage_pool.GetStoragePoolRequest(), + name="name_value", + ) + + +def test_update_storage_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_storage_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_storage_pool + ] = mock_rpc + + request = {} + client.update_storage_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_storage_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_storage_pool_rest_required_fields( + request_type=gcn_storage_pool.UpdateStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_storage_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
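+ # Note: UpdateStoragePool maps to HTTP PATCH with the message as the body,
+ # so the transcode result below sets both "body" and "query_params", and
+ # update_mask is the only field expected to ride in the query string.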
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_storage_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "storagePool", + ) + ) + ) + + +def test_update_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "storage_pool": { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{storage_pool.name=projects/*/locations/*/storagePools/*}" + % client.transport._host, + args[1], + ) + + +def test_update_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_storage_pool( + gcn_storage_pool.UpdateStoragePoolRequest(), + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_storage_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_storage_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_storage_pool + ] = mock_rpc + + request = {} + client.delete_storage_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_storage_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_storage_pool_rest_required_fields( + request_type=storage_pool.DeleteStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
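+ # Note: DeleteStoragePool is a long-running operation, so the faked payload
+ # above is an operations_pb2.Operation; the test stops at request routing
+ # and never polls the returned operation.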
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_storage_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/storagePools/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_storage_pool( + storage_pool.DeleteStoragePoolRequest(), + name="name_value", + ) + + +def test_validate_directory_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.validate_directory_service + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.validate_directory_service + ] = mock_rpc + + request = {} + client.validate_directory_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.validate_directory_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_validate_directory_service_rest_required_fields( + request_type=storage_pool.ValidateDirectoryServiceRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_directory_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_directory_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.validate_directory_service(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_validate_directory_service_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.validate_directory_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_switch_active_replica_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.switch_active_replica_zone + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.switch_active_replica_zone + ] = mock_rpc + + request = {} + client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.switch_active_replica_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_switch_active_replica_zone_rest_required_fields( + request_type=storage_pool.SwitchActiveReplicaZoneRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", "method": "post", @@ -23863,38 +26967,160 @@ def test_create_storage_pool_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_storage_pool(request) + response = client.switch_active_replica_zone(request) - expected_params = [ - ( - "storagePoolId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_storage_pool_rest_unset_required_fields(): +def test_switch_active_replica_zone_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_storage_pool._get_unset_required_fields({}) + unset_fields = transport.switch_active_replica_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_list_volumes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_volumes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_volumes] = mock_rpc + + request = {} + client.list_volumes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_volumes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesRequest): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_volumes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_volumes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
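+ # Note: the set difference below must be empty, meaning every still-unset
+ # field (filter, order_by, page_size, page_token) is a legitimate query
+ # parameter rather than a path or body field.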
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = volume.ListVolumesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = volume.ListVolumesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_volumes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_volumes_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_volumes._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("storagePoolId",)) - & set( + set( ( - "parent", - "storagePoolId", - "storagePool", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_create_storage_pool_rest_flattened(): +def test_list_volumes_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23903,7 +27129,7 @@ def test_create_storage_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
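+ # Note: ListVolumes is a unary page-returning call rather than an LRO, so
+ # the faked response switches from operations_pb2.Operation to a plain
+ # volume.ListVolumesResponse below.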
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = volume.ListVolumesResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -23911,33 +27137,32 @@ def test_create_storage_pool_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - storage_pool_id="storage_pool_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = volume.ListVolumesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_storage_pool(**mock_args) + client.list_volumes(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/storagePools" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, args[1], ) -def test_create_storage_pool_rest_flattened_error(transport: str = "rest"): +def test_list_volumes_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23946,15 +27171,74 @@ def test_create_storage_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_storage_pool( - gcn_storage_pool.CreateStoragePoolRequest(), + client.list_volumes( + volume.ListVolumesRequest(), parent="parent_value", - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - storage_pool_id="storage_pool_id_value", ) -def test_get_storage_pool_rest_use_cached_wrapped_rpc(): +def test_list_volumes_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
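+ # Note: the pager test below fakes four pages holding 3 + 0 + 1 + 2
+ # volumes; iterating the pager yields all six items, and the tokens
+ # "abc", "def", "ghi" and "" mark the page boundaries.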
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + volume.Volume(), + ], + next_page_token="abc", + ), + volume.ListVolumesResponse( + volumes=[], + next_page_token="def", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + ], + next_page_token="ghi", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(volume.ListVolumesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_volumes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, volume.Volume) for i in results) + + pages = list(client.list_volumes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_volume_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23968,33 +27252,29 @@ def test_get_storage_pool_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_storage_pool in client._transport._wrapped_methods + assert client._transport.get_volume in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_storage_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_volume] = mock_rpc request = {} - client.get_storage_pool(request) + client.get_volume(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_storage_pool(request) + client.get_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_storage_pool_rest_required_fields( - request_type=storage_pool.GetStoragePoolRequest, -): +def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -24009,7 +27289,7 @@ def test_get_storage_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_storage_pool._get_unset_required_fields(jsonified_request) + ).get_volume._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24018,7 +27298,7 @@ def test_get_storage_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_storage_pool._get_unset_required_fields(jsonified_request) + ).get_volume._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -24032,7 +27312,7 @@ def test_get_storage_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = storage_pool.StoragePool() + return_value = volume.Volume() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24053,30 +27333,30 @@ def test_get_storage_pool_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_pool.StoragePool.pb(return_value) + return_value = volume.Volume.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_storage_pool(request) + response = client.get_volume(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_storage_pool_rest_unset_required_fields(): +def test_get_volume_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_storage_pool._get_unset_required_fields({}) + unset_fields = transport.get_volume._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_storage_pool_rest_flattened(): +def test_get_volume_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24085,12 +27365,10 @@ def test_get_storage_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = storage_pool.StoragePool() + return_value = volume.Volume() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/storagePools/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} # get truthy value for each flattened field mock_args = dict( @@ -24102,26 +27380,25 @@ def test_get_storage_pool_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_pool.StoragePool.pb(return_value) + return_value = volume.Volume.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_storage_pool(**mock_args) + client.get_volume(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/storagePools/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, args[1], ) -def test_get_storage_pool_rest_flattened_error(transport: str = "rest"): +def test_get_volume_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24130,13 +27407,13 @@ def test_get_storage_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_storage_pool( - storage_pool.GetStoragePoolRequest(), + client.get_volume( + volume.GetVolumeRequest(), name="name_value", ) -def test_update_storage_pool_rest_use_cached_wrapped_rpc(): +def test_create_volume_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24150,21 +27427,17 @@ def test_update_storage_pool_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_storage_pool in client._transport._wrapped_methods - ) + assert client._transport.create_volume in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_storage_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_volume] = mock_rpc request = {} - client.update_storage_pool(request) + client.create_volume(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -24173,19 +27446,21 @@ def test_update_storage_pool_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_storage_pool(request) + client.create_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_storage_pool_rest_required_fields( - request_type=gcn_storage_pool.UpdateStoragePoolRequest, +def test_create_volume_rest_required_fields( + request_type=gcn_volume.CreateVolumeRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["parent"] = "" + request_init["volume_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24193,22 +27468,32 @@ def test_update_storage_pool_rest_required_fields( ) # verify fields with default values are dropped + assert "volumeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_storage_pool._get_unset_required_fields(jsonified_request) + ).create_volume._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "volumeId" in jsonified_request + assert jsonified_request["volumeId"] == request_init["volume_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["volumeId"] = "volume_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_storage_pool._get_unset_required_fields(jsonified_request) + ).create_volume._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set(("volume_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "volumeId" in jsonified_request + assert jsonified_request["volumeId"] == "volume_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24229,7 +27514,7 @@ def test_update_storage_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -24243,31 +27528,38 @@ def test_update_storage_pool_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_storage_pool(request) + response = client.create_volume(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "volumeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_storage_pool_rest_unset_required_fields(): +def test_create_volume_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_storage_pool._get_unset_required_fields({}) + unset_fields = transport.create_volume._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set(("volumeId",)) & set( ( - "updateMask", - "storagePool", + "parent", + "volumeId", + "volume", ) ) ) -def test_update_storage_pool_rest_flattened(): +def test_create_volume_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24279,16 +27571,13 @@ def test_update_storage_pool_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "storage_pool": { - "name": "projects/sample1/locations/sample2/storagePools/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + volume=gcn_volume.Volume(name="name_value"), + volume_id="volume_id_value", ) mock_args.update(sample_request) @@ -24300,20 +27589,19 @@ def test_update_storage_pool_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_storage_pool(**mock_args) + client.create_volume(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
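+ # Note: path_template.validate below checks that the flattened arguments
+ # were expanded into the http rule pattern, i.e. that the URL matches
+ # {parent=projects/*/locations/*}/volumes on this host.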
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{storage_pool.name=projects/*/locations/*/storagePools/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, args[1], ) -def test_update_storage_pool_rest_flattened_error(transport: str = "rest"): +def test_create_volume_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24322,14 +27610,15 @@ def test_update_storage_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_storage_pool( - gcn_storage_pool.UpdateStoragePoolRequest(), - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_volume( + gcn_volume.CreateVolumeRequest(), + parent="parent_value", + volume=gcn_volume.Volume(name="name_value"), + volume_id="volume_id_value", ) -def test_delete_storage_pool_rest_use_cached_wrapped_rpc(): +def test_update_volume_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24343,21 +27632,17 @@ def test_delete_storage_pool_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_storage_pool in client._transport._wrapped_methods - ) + assert client._transport.update_volume in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_storage_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_volume] = mock_rpc request = {} - client.delete_storage_pool(request) + client.update_volume(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1

@@ -24366,20 +27651,19 @@ def test_delete_storage_pool_rest_use_cached_wrapped_rpc():
# subsequent calls should use the cached wrapper
wrapper_fn.reset_mock()

- client.delete_storage_pool(request)
+ client.update_volume(request)

# Establish that a new wrapper was not created for this call
assert wrapper_fn.call_count == 0
assert mock_rpc.call_count == 2

-def test_delete_storage_pool_rest_required_fields(
- request_type=storage_pool.DeleteStoragePoolRequest,
+def test_update_volume_rest_required_fields(
+ request_type=gcn_volume.UpdateVolumeRequest,
):
transport_class = transports.NetAppRestTransport

request_init = {}
- request_init["name"] = ""
request = request_type(**request_init)
pb_request = request_type.pb(request)
jsonified_request = json.loads(
@@ -24390,21 +27674,19 @@ def test_delete_storage_pool_rest_required_fields(
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
- ).delete_storage_pool._get_unset_required_fields(jsonified_request)
+ ).update_volume._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)

# verify required fields with default values are now present

- jsonified_request["name"] = "name_value"
-
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
- ).delete_storage_pool._get_unset_required_fields(jsonified_request)
+ ).update_volume._get_unset_required_fields(jsonified_request)
+ # Check that path parameters and body parameters are not mixing in.
+ assert not set(unset_fields) - set(("update_mask",))
jsonified_request.update(unset_fields)

# verify required fields with non-default values are left alone
- assert "name" in jsonified_request
- assert jsonified_request["name"] == "name_value"

client = NetAppClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -24425,9 +27707,10 @@ def test_delete_storage_pool_rest_required_fields(
pb_request = request_type.pb(request)
transcode_result = {
"uri": "v1/sample_method",
- "method": "delete",
+ "method": "patch",
"query_params": pb_request,
}
+ transcode_result["body"] = pb_request
transcode.return_value = transcode_result

response_value = Response()
@@ -24438,23 +27721,31 @@ def test_delete_storage_pool_rest_required_fields(
req.return_value = response_value
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}

- response = client.delete_storage_pool(request)
+ response = client.update_volume(request)

expected_params = [("$alt", "json;enum-encoding=int")]
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params

-def test_delete_storage_pool_rest_unset_required_fields():
+def test_update_volume_rest_unset_required_fields():
transport = transports.NetAppRestTransport(
credentials=ga_credentials.AnonymousCredentials
)

- unset_fields = transport.delete_storage_pool._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name",)))
+ unset_fields = transport.update_volume._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(("updateMask",))
+ & set(
+ (
+ "updateMask",
+ "volume",
+ )
+ )
+ )

-def test_delete_storage_pool_rest_flattened():
+def test_update_volume_rest_flattened():
client = NetAppClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="rest",
@@ -24467,12 +27758,13 @@ def test_delete_storage_pool_rest_flattened():
# get arguments that satisfy an http rule for this method
sample_request = {
- "name": "projects/sample1/locations/sample2/storagePools/sample3"
+ "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"}
}

# get truthy value for each flattened field
mock_args = dict(
- name="name_value",
+ volume=gcn_volume.Volume(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
mock_args.update(sample_request)

@@ -24484,20 +27776,20 @@ def test_delete_storage_pool_rest_flattened():
req.return_value = response_value
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}

- client.delete_storage_pool(**mock_args)
+ client.update_volume(**mock_args)

# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
- "%s/v1/{name=projects/*/locations/*/storagePools/*}"
+ "%s/v1/{volume.name=projects/*/locations/*/volumes/*}"
% client.transport._host,
args[1],
)

-def test_delete_storage_pool_rest_flattened_error(transport: str = "rest"):
+def test_update_volume_rest_flattened_error(transport: str = "rest"):
client = NetAppClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
@@ -24506,13 +27798,14 @@ def test_delete_storage_pool_rest_flattened_error(transport: str = "rest"):
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
- client.delete_storage_pool(
- storage_pool.DeleteStoragePoolRequest(),
- name="name_value",
+ client.update_volume(
+ gcn_volume.UpdateVolumeRequest(),
+ volume=gcn_volume.Volume(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)

-def test_validate_directory_service_rest_use_cached_wrapped_rpc():
+def test_delete_volume_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
client = NetAppClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="rest",
@@ -24526,22 +27819,17 @@ def test_validate_directory_service_rest_use_cached_wrapped_rpc():
wrapper_fn.reset_mock()

# Ensure method has been cached
- assert (
- client._transport.validate_directory_service
- in client._transport._wrapped_methods
- )
+ assert client._transport.delete_volume in client._transport._wrapped_methods

# Replace cached wrapped function with mock
mock_rpc = mock.Mock()
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.validate_directory_service
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_volume] = mock_rpc

request = {}
- client.validate_directory_service(request)
+ client.delete_volume(request)

# Establish that the underlying gRPC stub method was called.
assert mock_rpc.call_count == 1 @@ -24550,16 +27838,14 @@ def test_validate_directory_service_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.validate_directory_service(request) + client.delete_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_validate_directory_service_rest_required_fields( - request_type=storage_pool.ValidateDirectoryServiceRequest, -): +def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -24574,7 +27860,7 @@ def test_validate_directory_service_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).validate_directory_service._get_unset_required_fields(jsonified_request) + ).delete_volume._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24583,7 +27869,9 @@ def test_validate_directory_service_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).validate_directory_service._get_unset_required_fields(jsonified_request) + ).delete_volume._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -24609,37 +27897,91 @@ def test_validate_directory_service_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_volume_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_volume._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +def test_delete_volume_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - response = client.validate_directory_service(request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + client.delete_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + args[1], + ) -def test_validate_directory_service_rest_unset_required_fields(): - transport = transports.NetAppRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_delete_volume_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.validate_directory_service._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_volume( + volume.DeleteVolumeRequest(), + name="name_value", + ) -def test_switch_active_replica_zone_rest_use_cached_wrapped_rpc(): +def test_revert_volume_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24653,22 +27995,17 @@ def test_switch_active_replica_zone_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.switch_active_replica_zone - in client._transport._wrapped_methods - ) + assert client._transport.revert_volume in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.switch_active_replica_zone - ] = mock_rpc + client._transport._wrapped_methods[client._transport.revert_volume] = mock_rpc request = {} - client.switch_active_replica_zone(request) + client.revert_volume(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -24677,20 +28014,19 @@ def test_switch_active_replica_zone_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.switch_active_replica_zone(request) + client.revert_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_switch_active_replica_zone_rest_required_fields( - request_type=storage_pool.SwitchActiveReplicaZoneRequest, -): +def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequest): transport_class = transports.NetAppRestTransport request_init = {} request_init["name"] = "" + request_init["snapshot_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24701,21 +28037,24 @@ def test_switch_active_replica_zone_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + ).revert_volume._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["snapshotId"] = "snapshot_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + ).revert_volume._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "snapshotId" in jsonified_request + assert jsonified_request["snapshotId"] == "snapshot_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24750,23 +28089,31 @@ def test_switch_active_replica_zone_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.switch_active_replica_zone(request) + response = client.revert_volume(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_switch_active_replica_zone_rest_unset_required_fields(): +def test_revert_volume_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.switch_active_replica_zone._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.revert_volume._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "snapshotId", + ) + ) + ) -def test_list_volumes_rest_use_cached_wrapped_rpc(): +def test_list_snapshots_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24780,29 +28127,31 @@ def test_list_volumes_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_volumes in client._transport._wrapped_methods + assert client._transport.list_snapshots in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = 
mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_volumes] = mock_rpc + client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc request = {} - client.list_volumes(request) + client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_volumes(request) + client.list_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesRequest): +def test_list_snapshots_rest_required_fields( + request_type=snapshot.ListSnapshotsRequest, +): transport_class = transports.NetAppRestTransport request_init = {} @@ -24817,7 +28166,7 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_volumes._get_unset_required_fields(jsonified_request) + ).list_snapshots._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24826,7 +28175,7 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_volumes._get_unset_required_fields(jsonified_request) + ).list_snapshots._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -24849,7 +28198,7 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse() + return_value = snapshot.ListSnapshotsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24870,26 +28219,26 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) + return_value = snapshot.ListSnapshotsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_volumes(request) + response = client.list_snapshots(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_volumes_rest_unset_required_fields(): +def test_list_snapshots_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_volumes._get_unset_required_fields({}) + unset_fields = transport.list_snapshots._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -24903,7 +28252,7 @@ def test_list_volumes_rest_unset_required_fields(): ) -def test_list_volumes_rest_flattened(): +def test_list_snapshots_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24912,10 +28261,12 @@ def test_list_volumes_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse() + return_value = snapshot.ListSnapshotsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -24927,25 +28278,26 @@ def test_list_volumes_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) + return_value = snapshot.ListSnapshotsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_volumes(**mock_args) + client.list_snapshots(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" + % client.transport._host, args[1], ) -def test_list_volumes_rest_flattened_error(transport: str = "rest"): +def test_list_snapshots_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24954,13 +28306,13 @@ def test_list_volumes_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_volumes( - volume.ListVolumesRequest(), + client.list_snapshots( + snapshot.ListSnapshotsRequest(), parent="parent_value", ) -def test_list_volumes_rest_pager(transport: str = "rest"): +def test_list_snapshots_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24972,28 +28324,28 @@ def test_list_volumes_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), + snapshot.ListSnapshotsResponse( + snapshots=[ + snapshot.Snapshot(), + snapshot.Snapshot(), + snapshot.Snapshot(), ], next_page_token="abc", ), - volume.ListVolumesResponse( - volumes=[], + snapshot.ListSnapshotsResponse( + snapshots=[], next_page_token="def", ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), + snapshot.ListSnapshotsResponse( + snapshots=[ + snapshot.Snapshot(), ], next_page_token="ghi", ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), + snapshot.ListSnapshotsResponse( + snapshots=[ + snapshot.Snapshot(), + snapshot.Snapshot(), ], ), ) @@ -25001,27 +28353,29 @@ def test_list_volumes_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(volume.ListVolumesResponse.to_json(x) for x in response) + response = tuple(snapshot.ListSnapshotsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } - pager = client.list_volumes(request=sample_request) + pager = client.list_snapshots(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, volume.Volume) for i in results) + assert all(isinstance(i, snapshot.Snapshot) for i in results) - pages = list(client.list_volumes(request=sample_request).pages) + pages = list(client.list_snapshots(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_volume_rest_use_cached_wrapped_rpc(): +def test_get_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25035,29 +28389,29 @@ def test_get_volume_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_volume in client._transport._wrapped_methods + assert client._transport.get_snapshot in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_volume] = mock_rpc + client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc request = {} - client.get_volume(request) + client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_volume(request) + client.get_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): +def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -25072,7 +28426,7 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_volume._get_unset_required_fields(jsonified_request) + ).get_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -25081,7 +28435,7 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_volume._get_unset_required_fields(jsonified_request) + ).get_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -25095,7 +28449,7 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = volume.Volume() + return_value = snapshot.Snapshot() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25116,30 +28470,30 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) + return_value = snapshot.Snapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_volume(request) + response = client.get_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_volume_rest_unset_required_fields(): +def test_get_snapshot_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_volume._get_unset_required_fields({}) + unset_fields = transport.get_snapshot._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_volume_rest_flattened(): +def test_get_snapshot_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25148,10 +28502,12 @@ def test_get_volume_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = volume.Volume() + return_value = snapshot.Snapshot() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } # get truthy value for each flattened field mock_args = dict( @@ -25163,25 +28519,26 @@ def test_get_volume_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) + return_value = snapshot.Snapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_volume(**mock_args) + client.get_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" + % client.transport._host, args[1], ) -def test_get_volume_rest_flattened_error(transport: str = "rest"): +def test_get_snapshot_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25190,13 +28547,13 @@ def test_get_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_volume( - volume.GetVolumeRequest(), + client.get_snapshot( + snapshot.GetSnapshotRequest(), name="name_value", ) -def test_create_volume_rest_use_cached_wrapped_rpc(): +def test_create_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25210,17 +28567,17 @@ def test_create_volume_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_volume in client._transport._wrapped_methods + assert client._transport.create_snapshot in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_volume] = mock_rpc + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc request = {} - client.create_volume(request) + client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -25229,21 +28586,21 @@ def test_create_volume_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_volume(request) + client.create_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_volume_rest_required_fields( - request_type=gcn_volume.CreateVolumeRequest, +def test_create_snapshot_rest_required_fields( + request_type=gcn_snapshot.CreateSnapshotRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["volume_id"] = "" + request_init["snapshot_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25251,32 +28608,32 @@ def test_create_volume_rest_required_fields( ) # verify fields with default values are dropped - assert "volumeId" not in jsonified_request + assert "snapshotId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_volume._get_unset_required_fields(jsonified_request) + ).create_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "volumeId" in jsonified_request - assert jsonified_request["volumeId"] == request_init["volume_id"] + assert "snapshotId" in jsonified_request + assert jsonified_request["snapshotId"] == request_init["snapshot_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["volumeId"] = "volume_id_value" + jsonified_request["snapshotId"] = "snapshot_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_volume._get_unset_required_fields(jsonified_request) + ).create_snapshot._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("volume_id",)) + assert not set(unset_fields) - set(("snapshot_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "volumeId" in jsonified_request - assert jsonified_request["volumeId"] == "volume_id_value" + assert "snapshotId" in jsonified_request + assert jsonified_request["snapshotId"] == "snapshot_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25311,11 +28668,11 @@ def test_create_volume_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_volume(request) + response = client.create_snapshot(request) expected_params = [ ( - "volumeId", + "snapshotId", "", ), ("$alt", "json;enum-encoding=int"), @@ -25324,211 +28681,25 @@ def test_create_volume_rest_required_fields( assert expected_params == actual_params -def test_create_volume_rest_unset_required_fields(): +def test_create_snapshot_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_volume._get_unset_required_fields({}) + unset_fields = transport.create_snapshot._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("volumeId",)) + set(("snapshotId",)) & set( ( "parent", - "volumeId", - "volume", - ) - ) - ) - - -def test_create_volume_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - volume=gcn_volume.Volume(name="name_value"), - volume_id="volume_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_volume(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, - args[1], - ) - - -def test_create_volume_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_volume( - gcn_volume.CreateVolumeRequest(), - parent="parent_value", - volume=gcn_volume.Volume(name="name_value"), - volume_id="volume_id_value", - ) - - -def test_update_volume_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_volume] = mock_rpc - - request = {} - client.update_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_volume_rest_required_fields( - request_type=gcn_volume.UpdateVolumeRequest, -): - transport_class = transports.NetAppRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_volume._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_volume(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_volume_rest_unset_required_fields(): - transport = transports.NetAppRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_volume._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "volume", + "snapshot", + "snapshotId", ) ) ) -def test_update_volume_rest_flattened(): +def test_create_snapshot_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25541,13 +28712,14 @@ def test_update_volume_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + "parent": "projects/sample1/locations/sample2/volumes/sample3" } # get truthy value for each flattened field mock_args = dict( - volume=gcn_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + snapshot=gcn_snapshot.Snapshot(name="name_value"), + snapshot_id="snapshot_id_value", ) mock_args.update(sample_request) @@ -25559,20 +28731,20 @@ def test_update_volume_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_volume(**mock_args) + client.create_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{volume.name=projects/*/locations/*/volumes/*}" + "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" % client.transport._host, args[1], ) -def test_update_volume_rest_flattened_error(transport: str = "rest"): +def test_create_snapshot_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25581,14 +28753,15 @@ def test_update_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_volume( - gcn_volume.UpdateVolumeRequest(), - volume=gcn_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_snapshot( + gcn_snapshot.CreateSnapshotRequest(), + parent="parent_value", + snapshot=gcn_snapshot.Snapshot(name="name_value"), + snapshot_id="snapshot_id_value", ) -def test_delete_volume_rest_use_cached_wrapped_rpc(): +def test_delete_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25602,17 +28775,17 @@ def test_delete_volume_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_volume in client._transport._wrapped_methods + assert client._transport.delete_snapshot in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_volume] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc request = {} - client.delete_volume(request) + client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -25621,14 +28794,16 @@ def test_delete_volume_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_volume(request) + client.delete_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequest): +def test_delete_snapshot_rest_required_fields( + request_type=snapshot.DeleteSnapshotRequest, +): transport_class = transports.NetAppRestTransport request_init = {} @@ -25643,7 +28818,7 @@ def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_volume._get_unset_required_fields(jsonified_request) + ).delete_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -25652,9 +28827,7 @@ def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_volume._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force",)) + ).delete_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -25693,23 +28866,23 @@ def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequ req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_volume(request) + response = client.delete_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_volume_rest_unset_required_fields(): +def test_delete_snapshot_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + unset_fields = transport.delete_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_volume_rest_flattened(): +def test_delete_snapshot_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25721,7 +28894,9 @@ def test_delete_volume_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } # get truthy value for each flattened field mock_args = dict( @@ -25737,19 +28912,20 @@ def test_delete_volume_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_volume(**mock_args) + client.delete_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" + % client.transport._host, args[1], ) -def test_delete_volume_rest_flattened_error(transport: str = "rest"): +def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25758,13 +28934,13 @@ def test_delete_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_volume( - volume.DeleteVolumeRequest(), + client.delete_snapshot( + snapshot.DeleteSnapshotRequest(), name="name_value", ) -def test_revert_volume_rest_use_cached_wrapped_rpc(): +def test_update_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25778,17 +28954,17 @@ def test_revert_volume_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.revert_volume in client._transport._wrapped_methods + assert client._transport.update_snapshot in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.revert_volume] = mock_rpc + client._transport._wrapped_methods[client._transport.update_snapshot] = mock_rpc request = {} - client.revert_volume(request) + client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -25797,19 +28973,19 @@ def test_revert_volume_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.revert_volume(request) + client.update_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequest): +def test_update_snapshot_rest_required_fields( + request_type=gcn_snapshot.UpdateSnapshotRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" - request_init["snapshot_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25820,24 +28996,19 @@ def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).revert_volume._get_unset_required_fields(jsonified_request) + ).update_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - jsonified_request["snapshotId"] = "snapshot_id_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).revert_volume._get_unset_required_fields(jsonified_request) + ).update_snapshot._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "snapshotId" in jsonified_request - assert jsonified_request["snapshotId"] == "snapshot_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25858,7 +29029,7 @@ def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -25872,31 +29043,93 @@ def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequ req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.revert_volume(request) + response = client.update_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_revert_volume_rest_unset_required_fields(): +def test_update_snapshot_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.revert_volume._get_unset_required_fields({}) + unset_fields = transport.update_snapshot._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("updateMask",)) & set( ( - "name", - "snapshotId", + "updateMask", + "snapshot", ) ) ) -def test_list_snapshots_rest_use_cached_wrapped_rpc(): +def test_update_snapshot_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "snapshot": { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + snapshot=gcn_snapshot.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{snapshot.name=projects/*/locations/*/volumes/*/snapshots/*}" + % client.transport._host, + args[1], + ) + + +def test_update_snapshot_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_snapshot( + gcn_snapshot.UpdateSnapshotRequest(), + snapshot=gcn_snapshot.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_active_directories_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25910,30 +29143,35 @@ def test_list_snapshots_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_snapshots in client._transport._wrapped_methods + assert ( + client._transport.list_active_directories + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_active_directories + ] = mock_rpc request = {} - client.list_snapshots(request) + client.list_active_directories(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_snapshots(request) + client.list_active_directories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_snapshots_rest_required_fields( - request_type=snapshot.ListSnapshotsRequest, +def test_list_active_directories_rest_required_fields( + request_type=active_directory.ListActiveDirectoriesRequest, ): transport_class = transports.NetAppRestTransport @@ -25949,7 +29187,7 @@ def test_list_snapshots_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_snapshots._get_unset_required_fields(jsonified_request) + ).list_active_directories._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -25958,7 +29196,7 @@ def test_list_snapshots_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_snapshots._get_unset_required_fields(jsonified_request) + ).list_active_directories._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -25981,7 +29219,7 @@ def test_list_snapshots_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = snapshot.ListSnapshotsResponse() + return_value = active_directory.ListActiveDirectoriesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26002,26 +29240,28 @@ def test_list_snapshots_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = snapshot.ListSnapshotsResponse.pb(return_value) + return_value = active_directory.ListActiveDirectoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_snapshots(request) + response = client.list_active_directories(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_snapshots_rest_unset_required_fields(): +def test_list_active_directories_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_snapshots._get_unset_required_fields({}) + unset_fields = transport.list_active_directories._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -26035,7 +29275,7 @@ def test_list_snapshots_rest_unset_required_fields(): ) -def test_list_snapshots_rest_flattened(): +def test_list_active_directories_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26044,12 +29284,10 @@ def test_list_snapshots_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = snapshot.ListSnapshotsResponse() + return_value = active_directory.ListActiveDirectoriesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -26061,26 +29299,26 @@ def test_list_snapshots_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = snapshot.ListSnapshotsResponse.pb(return_value) + return_value = active_directory.ListActiveDirectoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_snapshots(**mock_args) + client.list_active_directories(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" + "%s/v1/{parent=projects/*/locations/*}/activeDirectories" % client.transport._host, args[1], ) -def test_list_snapshots_rest_flattened_error(transport: str = "rest"): +def test_list_active_directories_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26089,13 +29327,13 @@ def test_list_snapshots_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_snapshots( - snapshot.ListSnapshotsRequest(), + client.list_active_directories( + active_directory.ListActiveDirectoriesRequest(), parent="parent_value", ) -def test_list_snapshots_rest_pager(transport: str = "rest"): +def test_list_active_directories_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26107,28 +29345,28 @@ def test_list_snapshots_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - snapshot.ListSnapshotsResponse( - snapshots=[ - snapshot.Snapshot(), - snapshot.Snapshot(), - snapshot.Snapshot(), + active_directory.ListActiveDirectoriesResponse( + active_directories=[ + active_directory.ActiveDirectory(), + active_directory.ActiveDirectory(), + active_directory.ActiveDirectory(), ], next_page_token="abc", ), - snapshot.ListSnapshotsResponse( - snapshots=[], + active_directory.ListActiveDirectoriesResponse( + active_directories=[], next_page_token="def", ), - snapshot.ListSnapshotsResponse( - snapshots=[ - snapshot.Snapshot(), + active_directory.ListActiveDirectoriesResponse( + active_directories=[ + active_directory.ActiveDirectory(), ], next_page_token="ghi", ), - snapshot.ListSnapshotsResponse( - snapshots=[ - snapshot.Snapshot(), - snapshot.Snapshot(), + active_directory.ListActiveDirectoriesResponse( + active_directories=[ + active_directory.ActiveDirectory(), + active_directory.ActiveDirectory(), ], ), ) @@ -26136,29 +29374,29 @@ def test_list_snapshots_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(snapshot.ListSnapshotsResponse.to_json(x) for x in response) + response = tuple( + active_directory.ListActiveDirectoriesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_snapshots(request=sample_request) + pager = client.list_active_directories(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, snapshot.Snapshot) for i in results) + assert all(isinstance(i, active_directory.ActiveDirectory) for i in results) - pages = list(client.list_snapshots(request=sample_request).pages) + pages = list(client.list_active_directories(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token 
== token -def test_get_snapshot_rest_use_cached_wrapped_rpc(): +def test_get_active_directory_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26172,29 +29410,35 @@ def test_get_snapshot_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_snapshot in client._transport._wrapped_methods + assert ( + client._transport.get_active_directory in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_active_directory + ] = mock_rpc request = {} - client.get_snapshot(request) + client.get_active_directory(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_snapshot(request) + client.get_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequest): +def test_get_active_directory_rest_required_fields( + request_type=active_directory.GetActiveDirectoryRequest, +): transport_class = transports.NetAppRestTransport request_init = {} @@ -26209,7 +29453,7 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_snapshot._get_unset_required_fields(jsonified_request) + ).get_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26218,7 +29462,7 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_snapshot._get_unset_required_fields(jsonified_request) + ).get_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -26232,7 +29476,7 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = snapshot.Snapshot() + return_value = active_directory.ActiveDirectory() # Mock the http request call within the method and fake a response. 
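For the `*_rest_pager` tests above, the contract under test is simple: keep issuing list requests while the previous response carried a `next_page_token`, and stop on an empty token. A minimal, library-free sketch of that loop (field names mirror the JSON wire form; this is not the generated pager class):

```python
def iterate_pages(fetch, request):
    """Yield list-response pages until nextPageToken comes back empty."""
    token = ""
    while True:
        page = fetch({**request, "pageToken": token})
        yield page
        token = page.get("nextPageToken", "")
        if not token:
            break

pages = iter([
    {"activeDirectories": ["ad1", "ad2"], "nextPageToken": "abc"},
    {"activeDirectories": ["ad3"]},                  # no token: last page
])
got = [item
       for page in iterate_pages(lambda _: next(pages), {"parent": "projects/p"})
       for item in page["activeDirectories"]]
assert got == ["ad1", "ad2", "ad3"]
```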
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26253,30 +29497,30 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ response_value.status_code = 200 # Convert return value to protobuf type - return_value = snapshot.Snapshot.pb(return_value) + return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_snapshot(request) + response = client.get_active_directory(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_snapshot_rest_unset_required_fields(): +def test_get_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_snapshot._get_unset_required_fields({}) + unset_fields = transport.get_active_directory._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_snapshot_rest_flattened(): +def test_get_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26285,11 +29529,11 @@ def test_get_snapshot_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = snapshot.Snapshot() + return_value = active_directory.ActiveDirectory() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" } # get truthy value for each flattened field @@ -26302,26 +29546,26 @@ def test_get_snapshot_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = snapshot.Snapshot.pb(return_value) + return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_snapshot(**mock_args) + client.get_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" + "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" % client.transport._host, args[1], ) -def test_get_snapshot_rest_flattened_error(transport: str = "rest"): +def test_get_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26330,13 +29574,13 @@ def test_get_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
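The `*_flattened_error` tests all pin the same client-surface rule: a call may pass either a fully-formed request object or flattened keyword fields, never both. Roughly what the generated clients do (a sketch, not the generated code):

```python
def get_active_directory(request=None, *, name=None):
    # Generated clients raise before any transport work if both styles mix.
    if request is not None and name is not None:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    return request if request is not None else {"name": name}

try:
    get_active_directory({"name": "x"}, name="y")
except ValueError:
    pass  # exactly what pytest.raises(ValueError) asserts
```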
with pytest.raises(ValueError): - client.get_snapshot( - snapshot.GetSnapshotRequest(), + client.get_active_directory( + active_directory.GetActiveDirectoryRequest(), name="name_value", ) -def test_create_snapshot_rest_use_cached_wrapped_rpc(): +def test_create_active_directory_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26350,17 +29594,22 @@ def test_create_snapshot_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_snapshot in client._transport._wrapped_methods + assert ( + client._transport.create_active_directory + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_active_directory + ] = mock_rpc request = {} - client.create_snapshot(request) + client.create_active_directory(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -26369,21 +29618,21 @@ def test_create_snapshot_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_snapshot(request) + client.create_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_snapshot_rest_required_fields( - request_type=gcn_snapshot.CreateSnapshotRequest, +def test_create_active_directory_rest_required_fields( + request_type=gcn_active_directory.CreateActiveDirectoryRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["snapshot_id"] = "" + request_init["active_directory_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -26391,32 +29640,32 @@ def test_create_snapshot_rest_required_fields( ) # verify fields with default values are dropped - assert "snapshotId" not in jsonified_request + assert "activeDirectoryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_snapshot._get_unset_required_fields(jsonified_request) + ).create_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "snapshotId" in jsonified_request - assert jsonified_request["snapshotId"] == request_init["snapshot_id"] + assert "activeDirectoryId" in jsonified_request + assert jsonified_request["activeDirectoryId"] == request_init["active_directory_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["snapshotId"] = "snapshot_id_value" + jsonified_request["activeDirectoryId"] = "active_directory_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_snapshot._get_unset_required_fields(jsonified_request) + ).create_active_directory._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("snapshot_id",)) + assert not set(unset_fields) - set(("active_directory_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "snapshotId" in jsonified_request - assert jsonified_request["snapshotId"] == "snapshot_id_value" + assert "activeDirectoryId" in jsonified_request + assert jsonified_request["activeDirectoryId"] == "active_directory_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26451,11 +29700,11 @@ def test_create_snapshot_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_snapshot(request) + response = client.create_active_directory(request) expected_params = [ ( - "snapshotId", + "activeDirectoryId", "", ), ("$alt", "json;enum-encoding=int"), @@ -26464,25 +29713,25 @@ def test_create_snapshot_rest_required_fields( assert expected_params == actual_params -def test_create_snapshot_rest_unset_required_fields(): +def test_create_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_snapshot._get_unset_required_fields({}) + unset_fields = transport.create_active_directory._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("snapshotId",)) + set(("activeDirectoryId",)) & set( ( "parent", - "snapshot", - "snapshotId", + "activeDirectory", + "activeDirectoryId", ) ) ) -def test_create_snapshot_rest_flattened(): +def test_create_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26494,15 +29743,13 @@ def test_create_snapshot_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - snapshot=gcn_snapshot.Snapshot(name="name_value"), - snapshot_id="snapshot_id_value", + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + active_directory_id="active_directory_id_value", ) mock_args.update(sample_request) @@ -26514,20 +29761,20 @@ def test_create_snapshot_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_snapshot(**mock_args) + client.create_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
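Worth calling out in the `expected_params` checks here: a required-but-defaulted query field such as `activeDirectoryId` still travels on the wire as an empty string, next to the `$alt=json;enum-encoding=int` system parameter every REST call in this suite carries. Sketch of the resulting query string:

```python
from urllib.parse import urlencode

expected_params = [
    ("activeDirectoryId", ""),           # required field left at its default
    ("$alt", "json;enum-encoding=int"),  # JSON transport, enums as integers
]
assert urlencode(expected_params) == (
    "activeDirectoryId=&%24alt=json%3Benum-encoding%3Dint"
)
```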
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" + "%s/v1/{parent=projects/*/locations/*}/activeDirectories" % client.transport._host, args[1], ) -def test_create_snapshot_rest_flattened_error(transport: str = "rest"): +def test_create_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26536,15 +29783,15 @@ def test_create_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_snapshot( - gcn_snapshot.CreateSnapshotRequest(), + client.create_active_directory( + gcn_active_directory.CreateActiveDirectoryRequest(), parent="parent_value", - snapshot=gcn_snapshot.Snapshot(name="name_value"), - snapshot_id="snapshot_id_value", + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + active_directory_id="active_directory_id_value", ) -def test_delete_snapshot_rest_use_cached_wrapped_rpc(): +def test_update_active_directory_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26558,17 +29805,22 @@ def test_delete_snapshot_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_snapshot in client._transport._wrapped_methods + assert ( + client._transport.update_active_directory + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_active_directory + ] = mock_rpc request = {} - client.delete_snapshot(request) + client.update_active_directory(request) # Establish that the underlying gRPC stub method was called. 
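The URI assertions in the flattened tests lean on `google.api_core.path_template.validate`, which checks a concrete URL against the HTTP-rule template; each `*` matches exactly one path segment. A small self-contained example (the host is illustrative):

```python
from google.api_core import path_template

tmpl = "https://netapp.example/v1/{parent=projects/*/locations/*}/activeDirectories"

# One segment per *, so a well-formed parent validates...
assert path_template.validate(
    tmpl, "https://netapp.example/v1/projects/p1/locations/l1/activeDirectories"
)
# ...and a parent missing its locations/* segment does not.
assert not path_template.validate(
    tmpl, "https://netapp.example/v1/projects/p1/activeDirectories"
)
```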
assert mock_rpc.call_count == 1 @@ -26577,20 +29829,19 @@ def test_delete_snapshot_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_snapshot(request) + client.update_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_snapshot_rest_required_fields( - request_type=snapshot.DeleteSnapshotRequest, +def test_update_active_directory_rest_required_fields( + request_type=gcn_active_directory.UpdateActiveDirectoryRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -26601,21 +29852,19 @@ def test_delete_snapshot_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_snapshot._get_unset_required_fields(jsonified_request) + ).update_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_snapshot._get_unset_required_fields(jsonified_request) + ).update_active_directory._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26636,9 +29885,10 @@ def test_delete_snapshot_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -26649,23 +29899,31 @@ def test_delete_snapshot_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_snapshot(request) + response = client.update_active_directory(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_snapshot_rest_unset_required_fields(): +def test_update_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_snapshot._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_active_directory._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "activeDirectory", + ) + ) + ) -def test_delete_snapshot_rest_flattened(): +def test_update_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26678,12 +29936,15 @@ def test_delete_snapshot_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": 
"projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + "active_directory": { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -26695,20 +29956,20 @@ def test_delete_snapshot_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_snapshot(**mock_args) + client.update_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" + "%s/v1/{active_directory.name=projects/*/locations/*/activeDirectories/*}" % client.transport._host, args[1], ) -def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): +def test_update_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26717,13 +29978,14 @@ def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_snapshot( - snapshot.DeleteSnapshotRequest(), - name="name_value", + client.update_active_directory( + gcn_active_directory.UpdateActiveDirectoryRequest(), + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_snapshot_rest_use_cached_wrapped_rpc(): +def test_delete_active_directory_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26737,17 +29999,22 @@ def test_update_snapshot_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_snapshot in client._transport._wrapped_methods + assert ( + client._transport.delete_active_directory + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_snapshot] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_active_directory + ] = mock_rpc request = {} - client.update_snapshot(request) + client.delete_active_directory(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -26756,19 +30023,20 @@ def test_update_snapshot_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_snapshot(request) + client.delete_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_snapshot_rest_required_fields( - request_type=gcn_snapshot.UpdateSnapshotRequest, +def test_delete_active_directory_rest_required_fields( + request_type=active_directory.DeleteActiveDirectoryRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -26779,19 +30047,21 @@ def test_update_snapshot_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_snapshot._get_unset_required_fields(jsonified_request) + ).delete_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_snapshot._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).delete_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26812,10 +30082,9 @@ def test_update_snapshot_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -26826,31 +30095,23 @@ def test_update_snapshot_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_snapshot(request) + response = client.delete_active_directory(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_snapshot_rest_unset_required_fields(): +def test_delete_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_snapshot._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "snapshot", - ) - ) - ) + unset_fields = transport.delete_active_directory._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_update_snapshot_rest_flattened(): +def test_delete_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26863,15 +30124,12 @@ def test_update_snapshot_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "snapshot": { - "name": 
"projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" - } + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" } # get truthy value for each flattened field mock_args = dict( - snapshot=gcn_snapshot.Snapshot(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -26883,20 +30141,20 @@ def test_update_snapshot_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_snapshot(**mock_args) + client.delete_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{snapshot.name=projects/*/locations/*/volumes/*/snapshots/*}" + "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" % client.transport._host, args[1], ) -def test_update_snapshot_rest_flattened_error(transport: str = "rest"): +def test_delete_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26905,14 +30163,13 @@ def test_update_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_snapshot( - gcn_snapshot.UpdateSnapshotRequest(), - snapshot=gcn_snapshot.Snapshot(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_active_directory( + active_directory.DeleteActiveDirectoryRequest(), + name="name_value", ) -def test_list_active_directories_rest_use_cached_wrapped_rpc(): +def test_list_kms_configs_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26926,10 +30183,7 @@ def test_list_active_directories_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_active_directories - in client._transport._wrapped_methods - ) + assert client._transport.list_kms_configs in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -26937,25 +30191,23 @@ def test_list_active_directories_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_active_directories + client._transport.list_kms_configs ] = mock_rpc request = {} - client.list_active_directories(request) + client.list_kms_configs(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_active_directories(request) + client.list_kms_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_active_directories_rest_required_fields( - request_type=active_directory.ListActiveDirectoriesRequest, -): +def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -26970,7 +30222,7 @@ def test_list_active_directories_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_active_directories._get_unset_required_fields(jsonified_request) + ).list_kms_configs._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26979,7 +30231,7 @@ def test_list_active_directories_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_active_directories._get_unset_required_fields(jsonified_request) + ).list_kms_configs._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -27002,7 +30254,7 @@ def test_list_active_directories_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = active_directory.ListActiveDirectoriesResponse() + return_value = kms.ListKmsConfigsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27023,28 +30275,26 @@ def test_list_active_directories_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = active_directory.ListActiveDirectoriesResponse.pb( - return_value - ) + return_value = kms.ListKmsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_active_directories(request) + response = client.list_kms_configs(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_active_directories_rest_unset_required_fields(): +def test_list_kms_configs_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_active_directories._get_unset_required_fields({}) + unset_fields = transport.list_kms_configs._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -27058,7 +30308,7 @@ def test_list_active_directories_rest_unset_required_fields(): ) -def test_list_active_directories_rest_flattened(): +def test_list_kms_configs_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27067,7 +30317,7 @@ def test_list_active_directories_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = active_directory.ListActiveDirectoriesResponse() + return_value = kms.ListKmsConfigsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -27082,26 +30332,25 @@ def test_list_active_directories_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = active_directory.ListActiveDirectoriesResponse.pb(return_value) + return_value = kms.ListKmsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_active_directories(**mock_args) + client.list_kms_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/activeDirectories" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, args[1], ) -def test_list_active_directories_rest_flattened_error(transport: str = "rest"): +def test_list_kms_configs_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27110,13 +30359,13 @@ def test_list_active_directories_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_active_directories( - active_directory.ListActiveDirectoriesRequest(), + client.list_kms_configs( + kms.ListKmsConfigsRequest(), parent="parent_value", ) -def test_list_active_directories_rest_pager(transport: str = "rest"): +def test_list_kms_configs_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27128,28 +30377,28 @@ def test_list_active_directories_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - active_directory.ListActiveDirectoriesResponse( - active_directories=[ - active_directory.ActiveDirectory(), - active_directory.ActiveDirectory(), - active_directory.ActiveDirectory(), + kms.ListKmsConfigsResponse( + kms_configs=[ + kms.KmsConfig(), + kms.KmsConfig(), + kms.KmsConfig(), ], next_page_token="abc", ), - active_directory.ListActiveDirectoriesResponse( - active_directories=[], + kms.ListKmsConfigsResponse( + kms_configs=[], next_page_token="def", ), - active_directory.ListActiveDirectoriesResponse( - active_directories=[ - active_directory.ActiveDirectory(), + kms.ListKmsConfigsResponse( + kms_configs=[ + kms.KmsConfig(), ], next_page_token="ghi", ), - active_directory.ListActiveDirectoriesResponse( - active_directories=[ - active_directory.ActiveDirectory(), - active_directory.ActiveDirectory(), + kms.ListKmsConfigsResponse( + kms_configs=[ + kms.KmsConfig(), + kms.KmsConfig(), ], ), ) @@ -27157,9 +30406,7 @@ def test_list_active_directories_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple( - active_directory.ListActiveDirectoriesResponse.to_json(x) for x in response - ) + response = 
tuple(kms.ListKmsConfigsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -27168,18 +30415,18 @@ def test_list_active_directories_rest_pager(transport: str = "rest"): sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_active_directories(request=sample_request) + pager = client.list_kms_configs(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, active_directory.ActiveDirectory) for i in results) + assert all(isinstance(i, kms.KmsConfig) for i in results) - pages = list(client.list_active_directories(request=sample_request).pages) + pages = list(client.list_kms_configs(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_active_directory_rest_use_cached_wrapped_rpc(): +def test_create_kms_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27193,9 +30440,7 @@ def test_get_active_directory_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_active_directory in client._transport._wrapped_methods - ) + assert client._transport.create_kms_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -27203,29 +30448,34 @@ def test_get_active_directory_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_active_directory + client._transport.create_kms_config ] = mock_rpc request = {} - client.get_active_directory(request) + client.create_kms_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_active_directory(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_active_directory_rest_required_fields( - request_type=active_directory.GetActiveDirectoryRequest, +def test_create_kms_config_rest_required_fields( + request_type=kms.CreateKmsConfigRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["kms_config_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27233,24 +30483,32 @@ def test_get_active_directory_rest_required_fields( ) # verify fields with default values are dropped + assert "kmsConfigId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_active_directory._get_unset_required_fields(jsonified_request) + ).create_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "kmsConfigId" in jsonified_request + assert jsonified_request["kmsConfigId"] == request_init["kms_config_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["kmsConfigId"] = "kms_config_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_active_directory._get_unset_required_fields(jsonified_request) + ).create_kms_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("kms_config_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "kmsConfigId" in jsonified_request + assert jsonified_request["kmsConfigId"] == "kms_config_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27259,7 +30517,7 @@ def test_get_active_directory_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = active_directory.ActiveDirectory() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
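The `transcode` stubbing in the required-fields hunk just below exists because default-valued fields are dropped during real transcoding; the tests bypass that and feed the transport a result of a fixed shape. Sketch of that shape with illustrative values (the real tests pass the protobuf request, not dicts):

```python
# URI, HTTP verb, query params -- and a body only for verbs that carry one.
transcode_result = {
    "uri": "v1/sample_method",
    "method": "post",
    "query_params": {"parent": "projects/p1/locations/l1"},
    "body": {"kmsConfig": {"name": "projects/p1/locations/l1/kmsConfigs/k1"}},
}
assert ("body" in transcode_result) == (transcode_result["method"] in ("post", "patch"))
```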
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27271,39 +30529,52 @@ def test_get_active_directory_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_active_directory(request) + response = client.create_kms_config(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "kmsConfigId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_active_directory_rest_unset_required_fields(): +def test_create_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_active_directory._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_kms_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("kmsConfigId",)) + & set( + ( + "parent", + "kmsConfigId", + "kmsConfig", + ) + ) + ) -def test_get_active_directory_rest_flattened(): +def test_create_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27312,43 +30583,40 @@ def test_get_active_directory_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = active_directory.ActiveDirectory() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + kms_config=kms.KmsConfig(name="name_value"), + kms_config_id="kms_config_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_active_directory(**mock_args) + client.create_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
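The intersection idiom in the `*_unset_required_fields` assertions above, spelled out: the helper must report exactly the required fields that a default-valued request leaves unset (here the query-string id) and nothing else.

```python
may_default = {"kmsConfigId"}                      # sent as "" when absent
required = {"parent", "kmsConfigId", "kmsConfig"}  # full required-field set
assert may_default & required == {"kmsConfigId"}   # what the helper must return
```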
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, args[1], ) -def test_get_active_directory_rest_flattened_error(transport: str = "rest"): +def test_create_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27357,13 +30625,15 @@ def test_get_active_directory_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_active_directory( - active_directory.GetActiveDirectoryRequest(), - name="name_value", + client.create_kms_config( + kms.CreateKmsConfigRequest(), + parent="parent_value", + kms_config=kms.KmsConfig(name="name_value"), + kms_config_id="kms_config_id_value", ) -def test_create_active_directory_rest_use_cached_wrapped_rpc(): +def test_get_kms_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27377,45 +30647,33 @@ def test_create_active_directory_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_active_directory - in client._transport._wrapped_methods - ) + assert client._transport.get_kms_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_active_directory - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_kms_config] = mock_rpc request = {} - client.create_active_directory(request) + client.get_kms_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_active_directory(request) + client.get_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_active_directory_rest_required_fields( - request_type=gcn_active_directory.CreateActiveDirectoryRequest, -): +def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigRequest): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" - request_init["active_directory_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27423,32 +30681,24 @@ def test_create_active_directory_rest_required_fields( ) # verify fields with default values are dropped - assert "activeDirectoryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_active_directory._get_unset_required_fields(jsonified_request) + ).get_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "activeDirectoryId" in jsonified_request - assert jsonified_request["activeDirectoryId"] == request_init["active_directory_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["activeDirectoryId"] = "active_directory_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_active_directory._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("active_directory_id",)) + ).get_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "activeDirectoryId" in jsonified_request - assert jsonified_request["activeDirectoryId"] == "active_directory_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27457,7 +30707,7 @@ def test_create_active_directory_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = kms.KmsConfig() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27469,52 +30719,39 @@ def test_create_active_directory_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = kms.KmsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_active_directory(request) + response = client.get_kms_config(request) - expected_params = [ - ( - "activeDirectoryId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_active_directory_rest_unset_required_fields(): +def test_get_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_active_directory._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("activeDirectoryId",)) - & set( - ( - "parent", - "activeDirectory", - "activeDirectoryId", - ) - ) - ) + unset_fields = transport.get_kms_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_active_directory_rest_flattened(): +def test_get_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27523,41 +30760,42 @@ def test_create_active_directory_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = kms.KmsConfig() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), - active_directory_id="active_directory_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = kms.KmsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_active_directory(**mock_args) + client.get_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
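The `kms.KmsConfig.pb(return_value)` conversions above are the proto-plus escape hatch these fakes need: `json_format` only accepts raw protobuf messages, so the wrapper is unwrapped before serializing. A hedged round-trip sketch, assuming the published `netapp_v1` types:

```python
from google.cloud import netapp_v1
from google.protobuf import json_format

cfg = netapp_v1.KmsConfig(name="projects/p1/locations/l1/kmsConfigs/k1")
raw = netapp_v1.KmsConfig.pb(cfg)         # proto-plus wrapper -> raw protobuf
payload = json_format.MessageToJson(raw)  # what the fake Response carries
assert "kmsConfigs/k1" in payload
```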
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/activeDirectories" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, args[1], ) -def test_create_active_directory_rest_flattened_error(transport: str = "rest"): +def test_get_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27566,15 +30804,13 @@ def test_create_active_directory_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_active_directory( - gcn_active_directory.CreateActiveDirectoryRequest(), - parent="parent_value", - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), - active_directory_id="active_directory_id_value", + client.get_kms_config( + kms.GetKmsConfigRequest(), + name="name_value", ) -def test_update_active_directory_rest_use_cached_wrapped_rpc(): +def test_update_kms_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27588,10 +30824,7 @@ def test_update_active_directory_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_active_directory - in client._transport._wrapped_methods - ) + assert client._transport.update_kms_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -27599,11 +30832,11 @@ def test_update_active_directory_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_active_directory + client._transport.update_kms_config ] = mock_rpc request = {} - client.update_active_directory(request) + client.update_kms_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -27612,15 +30845,15 @@ def test_update_active_directory_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_active_directory(request) + client.update_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_active_directory_rest_required_fields( - request_type=gcn_active_directory.UpdateActiveDirectoryRequest, +def test_update_kms_config_rest_required_fields( + request_type=kms.UpdateKmsConfigRequest, ): transport_class = transports.NetAppRestTransport @@ -27635,14 +30868,14 @@ def test_update_active_directory_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_active_directory._get_unset_required_fields(jsonified_request) + ).update_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_active_directory._get_unset_required_fields(jsonified_request) + ).update_kms_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -27682,31 +30915,31 @@ def test_update_active_directory_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_active_directory(request) + response = client.update_kms_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_active_directory_rest_unset_required_fields(): +def test_update_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_active_directory._get_unset_required_fields({}) + unset_fields = transport.update_kms_config._get_unset_required_fields({}) assert set(unset_fields) == ( set(("updateMask",)) & set( ( "updateMask", - "activeDirectory", + "kmsConfig", ) ) ) -def test_update_active_directory_rest_flattened(): +def test_update_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27719,14 +30952,14 @@ def test_update_active_directory_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "active_directory": { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + "kms_config": { + "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" } } # get truthy value for each flattened field mock_args = dict( - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + kms_config=kms.KmsConfig(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -27739,20 +30972,20 @@ def test_update_active_directory_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_active_directory(**mock_args) + client.update_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
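# The update_kms_config call above pairs the resource with an update_mask,
# protobuf's well-known FieldMask naming the fields the server should
# overwrite. A small standalone example of the type (independent of NetApp):
from google.protobuf import field_mask_pb2, json_format

mask = field_mask_pb2.FieldMask(paths=["description", "labels"])
# In proto3 JSON, a FieldMask serializes as one comma-joined string:
assert json_format.MessageToJson(mask) == '"description,labels"'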
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{active_directory.name=projects/*/locations/*/activeDirectories/*}" + "%s/v1/{kms_config.name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, args[1], ) -def test_update_active_directory_rest_flattened_error(transport: str = "rest"): +def test_update_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27761,14 +30994,14 @@ def test_update_active_directory_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_active_directory( - gcn_active_directory.UpdateActiveDirectoryRequest(), - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + client.update_kms_config( + kms.UpdateKmsConfigRequest(), + kms_config=kms.KmsConfig(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_active_directory_rest_use_cached_wrapped_rpc(): +def test_encrypt_volumes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27782,22 +31015,17 @@ def test_delete_active_directory_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_active_directory - in client._transport._wrapped_methods - ) + assert client._transport.encrypt_volumes in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_active_directory - ] = mock_rpc + client._transport._wrapped_methods[client._transport.encrypt_volumes] = mock_rpc request = {} - client.delete_active_directory(request) + client.encrypt_volumes(request) # Establish that the underlying gRPC stub method was called. 
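# The *_flattened_error tests above pin down a GAPIC calling convention: a
# method accepts either a full request object or flattened keyword fields,
# never both. A hypothetical guard illustrating the behavior being asserted:
import pytest

def update_kms_config(request=None, *, kms_config=None, update_mask=None):
    if request is not None and (kms_config is not None or update_mask is not None):
        raise ValueError("Cannot pass both a request object and flattened fields.")

with pytest.raises(ValueError):
    update_kms_config(request=object(), kms_config=object())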
assert mock_rpc.call_count == 1 @@ -27806,16 +31034,14 @@ def test_delete_active_directory_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_active_directory(request) + client.encrypt_volumes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_active_directory_rest_required_fields( - request_type=active_directory.DeleteActiveDirectoryRequest, -): +def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -27830,7 +31056,7 @@ def test_delete_active_directory_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_active_directory._get_unset_required_fields(jsonified_request) + ).encrypt_volumes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -27839,7 +31065,7 @@ def test_delete_active_directory_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_active_directory._get_unset_required_fields(jsonified_request) + ).encrypt_volumes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -27865,9 +31091,10 @@ def test_delete_active_directory_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -27878,81 +31105,23 @@ def test_delete_active_directory_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_active_directory(request) + response = client.encrypt_volumes(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_active_directory_rest_unset_required_fields(): +def test_encrypt_volumes_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_active_directory._get_unset_required_fields({}) + unset_fields = transport.encrypt_volumes._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_active_directory_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_active_directory(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_active_directory_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_active_directory( - active_directory.DeleteActiveDirectoryRequest(), - name="name_value", - ) - - -def test_list_kms_configs_rest_use_cached_wrapped_rpc(): +def test_verify_kms_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27966,7 +31135,7 @@ def test_list_kms_configs_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_kms_configs in client._transport._wrapped_methods + assert client._transport.verify_kms_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -27974,27 +31143,29 @@ def test_list_kms_configs_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_kms_configs + client._transport.verify_kms_config ] = mock_rpc request = {} - client.list_kms_configs(request) + client.verify_kms_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_kms_configs(request) + client.verify_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRequest): +def test_verify_kms_config_rest_required_fields( + request_type=kms.VerifyKmsConfigRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28005,30 +31176,21 @@ def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_kms_configs._get_unset_required_fields(jsonified_request) + ).verify_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_kms_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).verify_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28037,7 +31199,7 @@ def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = kms.ListKmsConfigsResponse() + return_value = kms.VerifyKmsConfigResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28049,167 +31211,40 @@ def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = kms.ListKmsConfigsResponse.pb(return_value) + return_value = kms.VerifyKmsConfigResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_kms_configs(request) + response = client.verify_kms_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_kms_configs_rest_unset_required_fields(): +def test_verify_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_kms_configs._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -def test_list_kms_configs_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = kms.ListKmsConfigsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = kms.ListKmsConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_kms_configs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, - args[1], - ) - - -def test_list_kms_configs_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_kms_configs( - kms.ListKmsConfigsRequest(), - parent="parent_value", - ) - - -def test_list_kms_configs_rest_pager(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - kms.ListKmsConfigsResponse( - kms_configs=[ - kms.KmsConfig(), - kms.KmsConfig(), - kms.KmsConfig(), - ], - next_page_token="abc", - ), - kms.ListKmsConfigsResponse( - kms_configs=[], - next_page_token="def", - ), - kms.ListKmsConfigsResponse( - kms_configs=[ - kms.KmsConfig(), - ], - next_page_token="ghi", - ), - kms.ListKmsConfigsResponse( - kms_configs=[ - kms.KmsConfig(), - kms.KmsConfig(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(kms.ListKmsConfigsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_kms_configs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, kms.KmsConfig) for i in results) - - pages = list(client.list_kms_configs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + unset_fields = transport.verify_kms_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_kms_config_rest_use_cached_wrapped_rpc(): +def test_delete_kms_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28223,7 +31258,7 @@ def test_create_kms_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_kms_config in client._transport._wrapped_methods + assert client._transport.delete_kms_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -28231,11 +31266,11 @@ def test_create_kms_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_kms_config + client._transport.delete_kms_config ] = mock_rpc request = {} - client.create_kms_config(request) + client.delete_kms_config(request) # Establish that the underlying gRPC stub method was called. 
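# The "verify fields with default values are dropped" steps above lean on
# proto3 JSON semantics: MessageToJson omits scalar fields still at their
# default value, so a required field only appears once the test sets it.
# Sketch, assuming the google.cloud.netapp_v1.types.kms module these tests
# import is available:
import json
from google.cloud.netapp_v1.types import kms as kms_types
from google.protobuf import json_format

req = kms_types.VerifyKmsConfigRequest()     # name defaults to ""
raw = kms_types.VerifyKmsConfigRequest.pb(req)
assert "name" not in json.loads(json_format.MessageToJson(raw))

req.name = "name_value"
raw = kms_types.VerifyKmsConfigRequest.pb(req)
assert json.loads(json_format.MessageToJson(raw))["name"] == "name_value"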
assert mock_rpc.call_count == 1 @@ -28244,21 +31279,20 @@ def test_create_kms_config_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_kms_config(request) + client.delete_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_kms_config_rest_required_fields( - request_type=kms.CreateKmsConfigRequest, +def test_delete_kms_config_rest_required_fields( + request_type=kms.DeleteKmsConfigRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" - request_init["kms_config_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28266,32 +31300,24 @@ def test_create_kms_config_rest_required_fields( ) # verify fields with default values are dropped - assert "kmsConfigId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_kms_config._get_unset_required_fields(jsonified_request) + ).delete_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "kmsConfigId" in jsonified_request - assert jsonified_request["kmsConfigId"] == request_init["kms_config_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["kmsConfigId"] = "kms_config_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_kms_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("kms_config_id",)) + ).delete_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "kmsConfigId" in jsonified_request - assert jsonified_request["kmsConfigId"] == "kms_config_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28312,10 +31338,9 @@ def test_create_kms_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -28326,38 +31351,23 @@ def test_create_kms_config_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_kms_config(request) + response = client.delete_kms_config(request) - expected_params = [ - ( - "kmsConfigId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_kms_config_rest_unset_required_fields(): +def test_delete_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_kms_config._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("kmsConfigId",)) - & set( - ( - "parent", - "kmsConfigId", - "kmsConfig", - ) - ) - ) + unset_fields = transport.delete_kms_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_kms_config_rest_flattened(): +def test_delete_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28369,13 +31379,13 @@ def test_create_kms_config_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - kms_config=kms.KmsConfig(name="name_value"), - kms_config_id="kms_config_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -28387,19 +31397,19 @@ def test_create_kms_config_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_kms_config(**mock_args) + client.delete_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
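# The *_unset_required_fields assertions above are plain set algebra: the
# intersection of fields that would be serialized with default values and
# the fields the RPC requires. Illustrated with ordinary sets:
sent_with_defaults = set()             # get/delete RPCs add no default-valued params
required = {"name"}
assert sent_with_defaults & required == set()          # i.e. set(()) & set(("name",))

sent_with_defaults = {"kmsConfigId"}   # create RPCs: the id query param defaults to ""
required = {"parent", "kmsConfigId", "kmsConfig"}
assert sent_with_defaults & required == {"kmsConfigId"}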
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, args[1], ) -def test_create_kms_config_rest_flattened_error(transport: str = "rest"): +def test_delete_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28408,15 +31418,13 @@ def test_create_kms_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_kms_config( - kms.CreateKmsConfigRequest(), - parent="parent_value", - kms_config=kms.KmsConfig(name="name_value"), - kms_config_id="kms_config_id_value", + client.delete_kms_config( + kms.DeleteKmsConfigRequest(), + name="name_value", ) -def test_get_kms_config_rest_use_cached_wrapped_rpc(): +def test_list_replications_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28430,33 +31438,37 @@ def test_get_kms_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_kms_config in client._transport._wrapped_methods + assert client._transport.list_replications in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_kms_config] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_replications + ] = mock_rpc request = {} - client.get_kms_config(request) + client.list_replications(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_kms_config(request) + client.list_replications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigRequest): +def test_list_replications_rest_required_fields( + request_type=replication.ListReplicationsRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28467,21 +31479,30 @@ def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_kms_config._get_unset_required_fields(jsonified_request) + ).list_replications._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_kms_config._get_unset_required_fields(jsonified_request) + ).list_replications._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28490,7 +31511,7 @@ def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = kms.KmsConfig() + return_value = replication.ListReplicationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28511,30 +31532,40 @@ def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = kms.KmsConfig.pb(return_value) + return_value = replication.ListReplicationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_kms_config(request) + response = client.list_replications(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_kms_config_rest_unset_required_fields(): +def test_list_replications_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_kms_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_replications._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_get_kms_config_rest_flattened(): +def test_list_replications_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28543,16 +31574,16 @@ def test_get_kms_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = kms.KmsConfig() + return_value = replication.ListReplicationsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" + "parent": "projects/sample1/locations/sample2/volumes/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -28560,25 +31591,26 @@ def test_get_kms_config_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = kms.KmsConfig.pb(return_value) + return_value = replication.ListReplicationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_kms_config(**mock_args) + client.list_replications(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" + % client.transport._host, args[1], ) -def test_get_kms_config_rest_flattened_error(transport: str = "rest"): +def test_list_replications_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28587,13 +31619,78 @@ def test_get_kms_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_kms_config( - kms.GetKmsConfigRequest(), - name="name_value", + client.list_replications( + replication.ListReplicationsRequest(), + parent="parent_value", ) -def test_update_kms_config_rest_use_cached_wrapped_rpc(): +def test_list_replications_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + replication.ListReplicationsResponse( + replications=[ + replication.Replication(), + replication.Replication(), + replication.Replication(), + ], + next_page_token="abc", + ), + replication.ListReplicationsResponse( + replications=[], + next_page_token="def", + ), + replication.ListReplicationsResponse( + replications=[ + replication.Replication(), + ], + next_page_token="ghi", + ), + replication.ListReplicationsResponse( + replications=[ + replication.Replication(), + replication.Replication(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + replication.ListReplicationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } + + pager = client.list_replications(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, replication.Replication) for i in results) + + pages = list(client.list_replications(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_replication_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28607,40 +31704,35 @@ def test_update_kms_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_kms_config in client._transport._wrapped_methods + assert client._transport.get_replication in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_kms_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_replication] = mock_rpc request = {} - client.update_kms_config(request) + client.get_replication(request) # Establish that the underlying gRPC stub method was called. 
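# The list_replications pager test above feeds four fake pages (3 + 0 + 1 + 2
# items) with tokens "abc", "def", "ghi", "". A minimal sketch of the paging
# contract it simulates, using plain dicts in place of response messages:
pages = [
    {"replications": [1, 2, 3], "next_page_token": "abc"},
    {"replications": [], "next_page_token": "def"},
    {"replications": [4], "next_page_token": "ghi"},
    {"replications": [5, 6], "next_page_token": ""},
]

def list_all(pages):
    results = []
    for page in pages:                   # one entry per mocked HTTP response
        results.extend(page["replications"])
        if not page["next_page_token"]:  # an empty token ends the iteration
            break
    return results

assert list_all(pages) == [1, 2, 3, 4, 5, 6]   # matches len(results) == 6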
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_kms_config(request) + client.get_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_kms_config_rest_required_fields( - request_type=kms.UpdateKmsConfigRequest, +def test_get_replication_rest_required_fields( + request_type=replication.GetReplicationRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28651,19 +31743,21 @@ def test_update_kms_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_kms_config._get_unset_required_fields(jsonified_request) + ).get_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_kms_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).get_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28672,7 +31766,7 @@ def test_update_kms_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.Replication() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28684,45 +31778,39 @@ def test_update_kms_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_kms_config(request) + response = client.get_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_kms_config_rest_unset_required_fields(): +def test_get_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_kms_config._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "kmsConfig", - ) - ) - ) + unset_fields = transport.get_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_update_kms_config_rest_flattened(): +def test_get_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28731,44 +31819,43 @@ def test_update_kms_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.Replication() # get arguments that satisfy an http rule for this method sample_request = { - "kms_config": { - "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" - } + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } # get truthy value for each flattened field mock_args = dict( - kms_config=kms.KmsConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_kms_config(**mock_args) + client.get_replication(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
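# The fake response above is produced by converting the proto-plus message to
# its raw protobuf with Replication.pb(...) before serializing, because
# json_format only understands protobuf messages, not proto-plus wrappers.
# Standalone sketch of the round trip, assuming the same netapp types module
# and proto-plus's from_json helper:
from google.cloud.netapp_v1.types import replication as repl_types
from google.protobuf import json_format

msg = repl_types.Replication(
    name="projects/p1/locations/l1/volumes/v1/replications/r1"
)
raw = repl_types.Replication.pb(msg)                  # proto-plus -> protobuf
payload = json_format.MessageToJson(raw)              # protobuf -> JSON text
restored = repl_types.Replication.from_json(payload)  # JSON text -> proto-plus
assert restored.name == msg.name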
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{kms_config.name=projects/*/locations/*/kmsConfigs/*}" + "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" % client.transport._host, args[1], ) -def test_update_kms_config_rest_flattened_error(transport: str = "rest"): +def test_get_replication_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28777,14 +31864,13 @@ def test_update_kms_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_kms_config( - kms.UpdateKmsConfigRequest(), - kms_config=kms.KmsConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_replication( + replication.GetReplicationRequest(), + name="name_value", ) -def test_encrypt_volumes_rest_use_cached_wrapped_rpc(): +def test_create_replication_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28798,17 +31884,21 @@ def test_encrypt_volumes_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.encrypt_volumes in client._transport._wrapped_methods + assert ( + client._transport.create_replication in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.encrypt_volumes] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_replication + ] = mock_rpc request = {} - client.encrypt_volumes(request) + client.create_replication(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -28817,18 +31907,21 @@ def test_encrypt_volumes_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.encrypt_volumes(request) + client.create_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesRequest): +def test_create_replication_rest_required_fields( + request_type=gcn_replication.CreateReplicationRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["replication_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28836,24 +31929,32 @@ def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesReq ) # verify fields with default values are dropped + assert "replicationId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).encrypt_volumes._get_unset_required_fields(jsonified_request) + ).create_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "replicationId" in jsonified_request + assert jsonified_request["replicationId"] == request_init["replication_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["replicationId"] = "replication_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).encrypt_volumes._get_unset_required_fields(jsonified_request) + ).create_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("replication_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "replicationId" in jsonified_request + assert jsonified_request["replicationId"] == "replication_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28888,146 +31989,100 @@ def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesReq req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.encrypt_volumes(request) + response = client.create_replication(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "replicationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_encrypt_volumes_rest_unset_required_fields(): +def test_create_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.encrypt_volumes._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_verify_kms_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.verify_kms_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + unset_fields = transport.create_replication._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("replicationId",)) + & set( + ( + "parent", + "replication", + "replicationId", + ) ) - client._transport._wrapped_methods[ - client._transport.verify_kms_config - ] = mock_rpc - - request = {} - client.verify_kms_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.verify_kms_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_verify_kms_config_rest_required_fields( - request_type=kms.VerifyKmsConfigRequest, -): - transport_class = transports.NetAppRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).verify_kms_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).verify_kms_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +def test_create_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = kms.VerifyKmsConfigResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") - # Convert return value to protobuf type - return_value = kms.VerifyKmsConfigResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + replication=gcn_replication.Replication(name="name_value"), + replication_id="replication_id_value", + ) + mock_args.update(sample_request) - response = client.verify_kms_config(request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + client.create_replication(**mock_args) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" + % client.transport._host, + args[1], + ) -def test_verify_kms_config_rest_unset_required_fields(): - transport = transports.NetAppRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_create_replication_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.verify_kms_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_replication( + gcn_replication.CreateReplicationRequest(), + parent="parent_value", + replication=gcn_replication.Replication(name="name_value"), + replication_id="replication_id_value", + ) -def test_delete_kms_config_rest_use_cached_wrapped_rpc(): +def test_delete_replication_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -29041,7 +32096,9 @@ def test_delete_kms_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_kms_config in client._transport._wrapped_methods + assert ( + client._transport.delete_replication in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -29049,11 +32106,11 @@ def test_delete_kms_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_kms_config + client._transport.delete_replication ] = mock_rpc request = {} - client.delete_kms_config(request) + client.delete_replication(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -29062,15 +32119,15 @@ def test_delete_kms_config_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_kms_config(request) + client.delete_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_kms_config_rest_required_fields( - request_type=kms.DeleteKmsConfigRequest, +def test_delete_replication_rest_required_fields( + request_type=replication.DeleteReplicationRequest, ): transport_class = transports.NetAppRestTransport @@ -29086,7 +32143,7 @@ def test_delete_kms_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_kms_config._get_unset_required_fields(jsonified_request) + ).delete_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -29095,7 +32152,7 @@ def test_delete_kms_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_kms_config._get_unset_required_fields(jsonified_request) + ).delete_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -29134,23 +32191,23 @@ def test_delete_kms_config_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_kms_config(request) + response = client.delete_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_kms_config_rest_unset_required_fields(): +def test_delete_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_kms_config._get_unset_required_fields({}) + unset_fields = transport.delete_replication._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_kms_config_rest_flattened(): +def test_delete_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -29163,7 +32220,7 @@ def test_delete_kms_config_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } # get truthy value for each flattened field @@ -29180,19 +32237,20 @@ def test_delete_kms_config_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_kms_config(**mock_args) + client.delete_replication(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
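# The assertions below unpack req.mock_calls entries: each recorded call is a
# (name, args, kwargs) triple, so args[1] is the second positional argument
# (the request URI) handed to session.request. Standalone example:
from unittest import mock

session = mock.Mock()
session.request(
    "delete", "v1/projects/p1/locations/l1/volumes/v1/replications/r1", timeout=30.0
)
name, args, kwargs = session.mock_calls[0]
assert args[1] == "v1/projects/p1/locations/l1/volumes/v1/replications/r1"
assert kwargs == {"timeout": 30.0}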
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" + % client.transport._host, args[1], ) -def test_delete_kms_config_rest_flattened_error(transport: str = "rest"): +def test_delete_replication_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -29201,13 +32259,13 @@ def test_delete_kms_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_kms_config( - kms.DeleteKmsConfigRequest(), + client.delete_replication( + replication.DeleteReplicationRequest(), name="name_value", ) -def test_list_replications_rest_use_cached_wrapped_rpc(): +def test_update_replication_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -29221,7 +32279,9 @@ def test_list_replications_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_replications in client._transport._wrapped_methods + assert ( + client._transport.update_replication in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -29229,29 +32289,32 @@ def test_list_replications_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_replications + client._transport.update_replication ] = mock_rpc request = {} - client.list_replications(request) + client.update_replication(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_replications(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_replications_rest_required_fields( - request_type=replication.ListReplicationsRequest, +def test_update_replication_rest_required_fields( + request_type=gcn_replication.UpdateReplicationRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -29262,30 +32325,19 @@ def test_list_replications_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_replications._get_unset_required_fields(jsonified_request) + ).update_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_replications._get_unset_required_fields(jsonified_request) + ).update_replication._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -29294,7 +32346,7 @@ def test_list_replications_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = replication.ListReplicationsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -29306,49 +32358,45 @@ def test_list_replications_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = replication.ListReplicationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_replications(request) + response = client.update_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_replications_rest_unset_required_fields(): +def test_update_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_replications._get_unset_required_fields({}) + unset_fields = transport.update_replication._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("updateMask",)) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "updateMask", + "replication", ) ) - & set(("parent",)) ) -def test_list_replications_rest_flattened(): +def test_update_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -29357,43 +32405,44 @@ def test_list_replications_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = replication.ListReplicationsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" + "replication": { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + replication=gcn_replication.Replication(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = replication.ListReplicationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_replications(**mock_args) + client.update_replication(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" + "%s/v1/{replication.name=projects/*/locations/*/volumes/*/replications/*}" % client.transport._host, args[1], ) -def test_list_replications_rest_flattened_error(transport: str = "rest"): +def test_update_replication_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -29402,78 +32451,138 @@ def test_list_replications_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_replications( - replication.ListReplicationsRequest(), - parent="parent_value", + client.update_replication( + gcn_replication.UpdateReplicationRequest(), + replication=gcn_replication.Replication(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_replications_rest_pager(transport: str = "rest"): +def test_stop_replication_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.stop_replication in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_replication + ] = mock_rpc + + request = {} + client.stop_replication(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_replication(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_stop_replication_rest_required_fields( + request_type=replication.StopReplicationRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - replication.ListReplicationsResponse( - replications=[ - replication.Replication(), - replication.Replication(), - replication.Replication(), - ], - next_page_token="abc", - ), - replication.ListReplicationsResponse( - replications=[], - next_page_token="def", - ), - replication.ListReplicationsResponse( - replications=[ - replication.Replication(), - ], - next_page_token="ghi", - ), - replication.ListReplicationsResponse( - replications=[ - replication.Replication(), - replication.Replication(), - ], - ), - ) - # Two responses for two calls - response = response + response + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Wrap the values into proper Response objs - response = tuple( - replication.ListReplicationsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - pager = client.list_replications(request=sample_request) + response = client.stop_replication(request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, replication.Replication) for i in results) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - pages = list(client.list_replications(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_stop_replication_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stop_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_replication_rest_use_cached_wrapped_rpc(): + +def test_resume_replication_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -29487,30 +32596,38 @@ def test_get_replication_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_replication in client._transport._wrapped_methods + assert ( + client._transport.resume_replication in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_replication] = mock_rpc + client._transport._wrapped_methods[ + client._transport.resume_replication + ] = mock_rpc request = {} - client.get_replication(request) + client.resume_replication(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_replication(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resume_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_replication_rest_required_fields( - request_type=replication.GetReplicationRequest, +def test_resume_replication_rest_required_fields( + request_type=replication.ResumeReplicationRequest, ): transport_class = transports.NetAppRestTransport @@ -29526,7 +32643,7 @@ def test_get_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_replication._get_unset_required_fields(jsonified_request) + ).resume_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -29535,7 +32652,7 @@ def test_get_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_replication._get_unset_required_fields(jsonified_request) + ).resume_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -29549,7 +32666,7 @@ def test_get_replication_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = replication.Replication() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -29561,99 +32678,166 @@ def test_get_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_replication(request) + response = client.resume_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_replication_rest_unset_required_fields(): +def test_resume_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_replication._get_unset_required_fields({}) + unset_fields = transport.resume_replication._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_replication_rest_flattened(): +def test_reverse_replication_direction_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = 
NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.reverse_replication_direction + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.reverse_replication_direction + ] = mock_rpc + + request = {} + client.reverse_replication_direction(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.reverse_replication_direction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_reverse_replication_direction_rest_required_fields( + request_type=replication.ReverseReplicationDirectionRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reverse_replication_direction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reverse_replication_direction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = replication.Replication() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = replication.Replication.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_replication(**mock_args) + response = client.reverse_replication_direction(request) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" - % client.transport._host, - args[1], - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_replication_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_reverse_replication_direction_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_replication( - replication.GetReplicationRequest(), - name="name_value", - ) + unset_fields = transport.reverse_replication_direction._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_replication_rest_use_cached_wrapped_rpc(): +def test_establish_peering_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -29667,9 +32851,7 @@ def test_create_replication_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_replication in client._transport._wrapped_methods - ) + assert client._transport.establish_peering in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -29677,11 +32859,11 @@ def test_create_replication_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_replication + client._transport.establish_peering ] = mock_rpc request = {} - client.create_replication(request) + client.establish_peering(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -29690,21 +32872,23 @@ def test_create_replication_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_replication(request) + client.establish_peering(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_replication_rest_required_fields( - request_type=gcn_replication.CreateReplicationRequest, +def test_establish_peering_rest_required_fields( + request_type=replication.EstablishPeeringRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" - request_init["replication_id"] = "" + request_init["name"] = "" + request_init["peer_cluster_name"] = "" + request_init["peer_svm_name"] = "" + request_init["peer_volume_name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -29712,32 +32896,33 @@ def test_create_replication_rest_required_fields( ) # verify fields with default values are dropped - assert "replicationId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_replication._get_unset_required_fields(jsonified_request) + ).establish_peering._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "replicationId" in jsonified_request - assert jsonified_request["replicationId"] == request_init["replication_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["replicationId"] = "replication_id_value" + jsonified_request["name"] = "name_value" + jsonified_request["peerClusterName"] = "peer_cluster_name_value" + jsonified_request["peerSvmName"] = "peer_svm_name_value" + jsonified_request["peerVolumeName"] = "peer_volume_name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_replication._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("replication_id",)) + ).establish_peering._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "replicationId" in jsonified_request - assert jsonified_request["replicationId"] == "replication_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "peerClusterName" in jsonified_request + assert jsonified_request["peerClusterName"] == "peer_cluster_name_value" + assert "peerSvmName" in jsonified_request + assert jsonified_request["peerSvmName"] == "peer_svm_name_value" + assert "peerVolumeName" in jsonified_request + assert jsonified_request["peerVolumeName"] == "peer_volume_name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -29772,100 +32957,33 @@ def test_create_replication_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_replication(request) + response = client.establish_peering(request) - expected_params = [ - ( - "replicationId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_replication_rest_unset_required_fields(): +def test_establish_peering_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_replication._get_unset_required_fields({}) + unset_fields = transport.establish_peering._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("replicationId",)) + set(()) & set( ( - "parent", - "replication", - "replicationId", + "name", + "peerClusterName", + "peerSvmName", + "peerVolumeName", ) ) ) -def test_create_replication_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - replication=gcn_replication.Replication(name="name_value"), - replication_id="replication_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_replication(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" - % client.transport._host, - args[1], - ) - - -def test_create_replication_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_replication( - gcn_replication.CreateReplicationRequest(), - parent="parent_value", - replication=gcn_replication.Replication(name="name_value"), - replication_id="replication_id_value", - ) - - -def test_delete_replication_rest_use_cached_wrapped_rpc(): +def test_sync_replication_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -29879,9 +32997,7 @@ def test_delete_replication_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_replication in client._transport._wrapped_methods - ) + assert client._transport.sync_replication in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -29889,11 +33005,11 @@ def test_delete_replication_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_replication + client._transport.sync_replication ] = mock_rpc request = {} - client.delete_replication(request) + client.sync_replication(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -29902,15 +33018,15 @@ def test_delete_replication_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_replication(request) + client.sync_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_replication_rest_required_fields( - request_type=replication.DeleteReplicationRequest, +def test_sync_replication_rest_required_fields( + request_type=replication.SyncReplicationRequest, ): transport_class = transports.NetAppRestTransport @@ -29926,7 +33042,7 @@ def test_delete_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_replication._get_unset_required_fields(jsonified_request) + ).sync_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -29935,7 +33051,7 @@ def test_delete_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_replication._get_unset_required_fields(jsonified_request) + ).sync_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -29961,9 +33077,10 @@ def test_delete_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -29974,81 +33091,23 @@ def test_delete_replication_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_replication(request) + response = client.sync_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_replication_rest_unset_required_fields(): +def test_sync_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_replication._get_unset_required_fields({}) + unset_fields = transport.sync_replication._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_replication_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_replication(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_replication_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_replication( - replication.DeleteReplicationRequest(), - name="name_value", - ) - - -def test_update_replication_rest_use_cached_wrapped_rpc(): +def test_create_backup_vault_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30063,7 +33122,7 @@ def test_update_replication_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_replication in client._transport._wrapped_methods + client._transport.create_backup_vault in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -30072,11 +33131,11 @@ def test_update_replication_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_replication + client._transport.create_backup_vault ] = mock_rpc request = {} - client.update_replication(request) + client.create_backup_vault(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -30085,19 +33144,21 @@ def test_update_replication_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_replication(request) + client.create_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_replication_rest_required_fields( - request_type=gcn_replication.UpdateReplicationRequest, +def test_create_backup_vault_rest_required_fields( + request_type=gcn_backup_vault.CreateBackupVaultRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["parent"] = "" + request_init["backup_vault_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -30105,22 +33166,32 @@ def test_update_replication_rest_required_fields( ) # verify fields with default values are dropped + assert "backupVaultId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_replication._get_unset_required_fields(jsonified_request) + ).create_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupVaultId"] = "backup_vault_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_replication._get_unset_required_fields(jsonified_request) + ).create_backup_vault._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set(("backup_vault_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == "backup_vault_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -30141,7 +33212,7 @@ def test_update_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -30155,31 +33226,38 @@ def test_update_replication_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_replication(request) + response = client.create_backup_vault(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupVaultId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_replication_rest_unset_required_fields(): +def test_create_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_replication._get_unset_required_fields({}) + unset_fields = transport.create_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set(("backupVaultId",)) & set( ( - "updateMask", - "replication", + "parent", + "backupVaultId", + "backupVault", ) ) ) -def test_update_replication_rest_flattened(): +def test_create_backup_vault_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -30191,16 +33269,13 @@ def test_update_replication_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "replication": { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - replication=gcn_replication.Replication(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", ) mock_args.update(sample_request) @@ -30212,20 +33287,20 @@ def test_update_replication_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_replication(**mock_args) + client.create_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
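        # mock_calls[0] unpacks as (name, args, kwargs); args[1] is the URI
        # that was actually sent to the session.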
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{replication.name=projects/*/locations/*/volumes/*/replications/*}" + "%s/v1/{parent=projects/*/locations/*}/backupVaults" % client.transport._host, args[1], ) -def test_update_replication_rest_flattened_error(transport: str = "rest"): +def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -30234,14 +33309,15 @@ def test_update_replication_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_replication( - gcn_replication.UpdateReplicationRequest(), - replication=gcn_replication.Replication(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_backup_vault( + gcn_backup_vault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", ) -def test_stop_replication_rest_use_cached_wrapped_rpc(): +def test_get_backup_vault_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30255,7 +33331,7 @@ def test_stop_replication_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.stop_replication in client._transport._wrapped_methods + assert client._transport.get_backup_vault in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -30263,28 +33339,24 @@ def test_stop_replication_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.stop_replication + client._transport.get_backup_vault ] = mock_rpc request = {} - client.stop_replication(request) + client.get_backup_vault(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.stop_replication(request) + client.get_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_stop_replication_rest_required_fields( - request_type=replication.StopReplicationRequest, +def test_get_backup_vault_rest_required_fields( + request_type=backup_vault.GetBackupVaultRequest, ): transport_class = transports.NetAppRestTransport @@ -30300,7 +33372,7 @@ def test_stop_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).stop_replication._get_unset_required_fields(jsonified_request) + ).get_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -30309,7 +33381,7 @@ def test_stop_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).stop_replication._get_unset_required_fields(jsonified_request) + ).get_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -30323,7 +33395,7 @@ def test_stop_replication_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_vault.BackupVault() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -30335,163 +33407,99 @@ def test_stop_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_vault.BackupVault.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.stop_replication(request) + response = client.get_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_stop_replication_rest_unset_required_fields(): +def test_get_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.stop_replication._get_unset_required_fields({}) + unset_fields = transport.get_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_resume_replication_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = NetAppClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.resume_replication in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.resume_replication - ] = mock_rpc - - request = {} - client.resume_replication(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.resume_replication(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_resume_replication_rest_required_fields( - request_type=replication.ResumeReplicationRequest, -): - transport_class = transports.NetAppRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).resume_replication._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).resume_replication._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - +def test_get_backup_vault_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
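+        # get_backup_vault is unary, so the faked response body is the
+        # resource itself rather than a longrunning Operation.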
+ return_value = backup_vault.BackupVault() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - response = client.resume_replication(request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_vault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + client.get_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) -def test_resume_replication_rest_unset_required_fields(): - transport = transports.NetAppRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.resume_replication._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_vault( + backup_vault.GetBackupVaultRequest(), + name="name_value", + ) -def test_reverse_replication_direction_rest_use_cached_wrapped_rpc(): +def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30506,8 +33514,7 @@ def test_reverse_replication_direction_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.reverse_replication_direction - in client._transport._wrapped_methods + client._transport.list_backup_vaults in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -30516,33 +33523,29 @@ def test_reverse_replication_direction_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.reverse_replication_direction + client._transport.list_backup_vaults ] = mock_rpc request = {} - client.reverse_replication_direction(request) + client.list_backup_vaults(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.reverse_replication_direction(request) + client.list_backup_vaults(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_reverse_replication_direction_rest_required_fields( - request_type=replication.ReverseReplicationDirectionRequest, +def test_list_backup_vaults_rest_required_fields( + request_type=backup_vault.ListBackupVaultsRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -30553,21 +33556,30 @@ def test_reverse_replication_direction_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reverse_replication_direction._get_unset_required_fields(jsonified_request) + ).list_backup_vaults._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reverse_replication_direction._get_unset_required_fields(jsonified_request) + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -30576,7 +33588,7 @@ def test_reverse_replication_direction_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_vault.ListBackupVaultsResponse() # Mock the http request call within the method and fake a response. 
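    # Patching Session.request intercepts the call at the HTTP layer, so the
    # test never touches the network.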
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -30588,39 +33600,170 @@ def test_reverse_replication_direction_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.reverse_replication_direction(request) + response = client.list_backup_vaults(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_reverse_replication_direction_rest_unset_required_fields(): +def test_list_backup_vaults_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.reverse_replication_direction._get_unset_required_fields( - {} + unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) ) - assert set(unset_fields) == (set(()) & set(("name",))) -def test_establish_peering_rest_use_cached_wrapped_rpc(): +def test_list_backup_vaults_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backup_vault.ListBackupVaultsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_backup_vaults(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_vaults( + backup_vault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + next_page_token="abc", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + ], + next_page_token="ghi", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backup_vault.ListBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup_vault.BackupVault) for i in results) + + pages = list(client.list_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_update_backup_vault_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30634,7 +33777,9 @@ def test_establish_peering_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.establish_peering in client._transport._wrapped_methods + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -30642,11 +33787,11 @@ def test_establish_peering_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.establish_peering + client._transport.update_backup_vault ] = mock_rpc request = {} - client.establish_peering(request) + client.update_backup_vault(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -30655,23 +33800,19 @@ def test_establish_peering_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.establish_peering(request) + client.update_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_establish_peering_rest_required_fields( - request_type=replication.EstablishPeeringRequest, +def test_update_backup_vault_rest_required_fields( + request_type=gcn_backup_vault.UpdateBackupVaultRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" - request_init["peer_cluster_name"] = "" - request_init["peer_svm_name"] = "" - request_init["peer_volume_name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -30682,30 +33823,19 @@ def test_establish_peering_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).establish_peering._get_unset_required_fields(jsonified_request) + ).update_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - jsonified_request["peerClusterName"] = "peer_cluster_name_value" - jsonified_request["peerSvmName"] = "peer_svm_name_value" - jsonified_request["peerVolumeName"] = "peer_volume_name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).establish_peering._get_unset_required_fields(jsonified_request) + ).update_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "peerClusterName" in jsonified_request - assert jsonified_request["peerClusterName"] == "peer_cluster_name_value" - assert "peerSvmName" in jsonified_request - assert jsonified_request["peerSvmName"] == "peer_svm_name_value" - assert "peerVolumeName" in jsonified_request - assert jsonified_request["peerVolumeName"] == "peer_volume_name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -30726,7 +33856,7 @@ def test_establish_peering_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -30740,33 +33870,93 @@ def test_establish_peering_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.establish_peering(request) + response = client.update_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_establish_peering_rest_unset_required_fields(): +def test_update_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.establish_peering._get_unset_required_fields({}) + unset_fields = transport.update_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("updateMask",)) & set( ( - "name", - "peerClusterName", - "peerSvmName", - "peerVolumeName", + "updateMask", + "backupVault", ) ) ) -def test_sync_replication_rest_use_cached_wrapped_rpc(): +def test_update_backup_vault_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + gcn_backup_vault.UpdateBackupVaultRequest(), + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30780,7 +33970,9 @@ def test_sync_replication_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.sync_replication in client._transport._wrapped_methods + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -30788,11 +33980,11 @@ def test_sync_replication_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.sync_replication + client._transport.delete_backup_vault ] = mock_rpc request = {} - client.sync_replication(request) + client.delete_backup_vault(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -30801,15 +33993,15 @@ def test_sync_replication_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.sync_replication(request) + client.delete_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_sync_replication_rest_required_fields( - request_type=replication.SyncReplicationRequest, +def test_delete_backup_vault_rest_required_fields( + request_type=backup_vault.DeleteBackupVaultRequest, ): transport_class = transports.NetAppRestTransport @@ -30825,7 +34017,7 @@ def test_sync_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sync_replication._get_unset_required_fields(jsonified_request) + ).delete_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -30834,7 +34026,7 @@ def test_sync_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sync_replication._get_unset_required_fields(jsonified_request) + ).delete_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -30860,10 +34052,9 @@ def test_sync_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -30874,23 +34065,81 @@ def test_sync_replication_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.sync_replication(request) + response = client.delete_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_sync_replication_rest_unset_required_fields(): +def test_delete_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.sync_replication._get_unset_required_fields({}) + unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_backup_vault_rest_use_cached_wrapped_rpc(): +def test_delete_backup_vault_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_vault( + backup_vault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +def test_create_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30904,21 +34153,17 @@ def test_create_backup_vault_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_backup_vault in client._transport._wrapped_methods - ) + assert client._transport.create_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_backup_vault - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc request = {} - client.create_backup_vault(request) + client.create_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -30927,21 +34172,21 @@ def test_create_backup_vault_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_backup_vault(request) + client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_vault_rest_required_fields( - request_type=gcn_backup_vault.CreateBackupVaultRequest, +def test_create_backup_rest_required_fields( + request_type=gcn_backup.CreateBackupRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["backup_vault_id"] = "" + request_init["backup_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -30949,32 +34194,32 @@ def test_create_backup_vault_rest_required_fields( ) # verify fields with default values are dropped - assert "backupVaultId" not in jsonified_request + assert "backupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_vault._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "backupVaultId" in jsonified_request - assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["backupVaultId"] = "backup_vault_id_value" + jsonified_request["backupId"] = "backup_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_vault._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("backup_vault_id",)) + assert not set(unset_fields) - set(("backup_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "backupVaultId" in jsonified_request - assert jsonified_request["backupVaultId"] == "backup_vault_id_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -31009,11 +34254,11 @@ def test_create_backup_vault_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_vault(request) + response = client.create_backup(request) expected_params = [ ( - "backupVaultId", + "backupId", "", ), ("$alt", "json;enum-encoding=int"), @@ -31022,25 +34267,25 @@ def test_create_backup_vault_rest_required_fields( assert expected_params == actual_params -def test_create_backup_vault_rest_unset_required_fields(): +def test_create_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup_vault._get_unset_required_fields({}) + unset_fields = transport.create_backup._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("backupVaultId",)) + set(("backupId",)) & set( ( "parent", - "backupVaultId", - "backupVault", + "backupId", + "backup", ) ) ) -def test_create_backup_vault_rest_flattened(): +def test_create_backup_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -31052,13 +34297,15 @@ def test_create_backup_vault_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup_vault=gcn_backup_vault.BackupVault(name="name_value"), - backup_vault_id="backup_vault_id_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", ) mock_args.update(sample_request) @@ -31070,20 +34317,20 @@ def test_create_backup_vault_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_vault(**mock_args) + client.create_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupVaults" + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/backups" % client.transport._host, args[1], ) -def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_create_backup_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31092,15 +34339,15 @@ def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_backup_vault( - gcn_backup_vault.CreateBackupVaultRequest(), + client.create_backup( + gcn_backup.CreateBackupRequest(), parent="parent_value", - backup_vault=gcn_backup_vault.BackupVault(name="name_value"), - backup_vault_id="backup_vault_id_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", ) -def test_get_backup_vault_rest_use_cached_wrapped_rpc(): +def test_get_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31114,33 +34361,29 @@ def test_get_backup_vault_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup_vault in client._transport._wrapped_methods + assert client._transport.get_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_backup_vault - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc request = {} - client.get_backup_vault(request) + client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup_vault(request) + client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_vault_rest_required_fields( - request_type=backup_vault.GetBackupVaultRequest, -): +def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -31155,7 +34398,7 @@ def test_get_backup_vault_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_vault._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -31164,7 +34407,7 @@ def test_get_backup_vault_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_vault._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -31178,7 +34421,7 @@ def test_get_backup_vault_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup_vault.BackupVault() + return_value = backup.Backup() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -31199,30 +34442,30 @@ def test_get_backup_vault_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup_vault.BackupVault.pb(return_value) + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_vault(request) + response = client.get_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_vault_rest_unset_required_fields(): +def test_get_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup_vault._get_unset_required_fields({}) + unset_fields = transport.get_backup._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_backup_vault_rest_flattened(): +def test_get_backup_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -31231,11 +34474,11 @@ def test_get_backup_vault_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup_vault.BackupVault() + return_value = backup.Backup() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" } # get truthy value for each flattened field @@ -31248,26 +34491,26 @@ def test_get_backup_vault_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup_vault.BackupVault.pb(return_value) + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_vault(**mock_args) + client.get_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + "%s/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}" % client.transport._host, args[1], ) -def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_get_backup_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31276,13 +34519,13 @@ def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_backup_vault( - backup_vault.GetBackupVaultRequest(), + client.get_backup( + backup.GetBackupRequest(), name="name_value", ) -def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): +def test_list_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31296,35 +34539,29 @@ def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_backup_vaults in client._transport._wrapped_methods - ) + assert client._transport.list_backups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_backup_vaults - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc request = {} - client.list_backup_vaults(request) + client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_backup_vaults(request) + client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backup_vaults_rest_required_fields( - request_type=backup_vault.ListBackupVaultsRequest, -): +def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -31339,7 +34576,7 @@ def test_list_backup_vaults_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_vaults._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -31348,7 +34585,7 @@ def test_list_backup_vaults_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_vaults._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -31371,7 +34608,7 @@ def test_list_backup_vaults_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup_vault.ListBackupVaultsResponse() + return_value = backup.ListBackupsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -31392,26 +34629,26 @@ def test_list_backup_vaults_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_vaults(request) + response = client.list_backups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backup_vaults_rest_unset_required_fields(): +def test_list_backups_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + unset_fields = transport.list_backups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -31425,7 +34662,7 @@ def test_list_backup_vaults_rest_unset_required_fields(): ) -def test_list_backup_vaults_rest_flattened(): +def test_list_backups_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -31434,10 +34671,12 @@ def test_list_backup_vaults_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup_vault.ListBackupVaultsResponse() + return_value = backup.ListBackupsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -31449,26 +34688,26 @@ def test_list_backup_vaults_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_vaults(**mock_args) + client.list_backups(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupVaults" + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/backups" % client.transport._host, args[1], ) -def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): +def test_list_backups_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31477,13 +34716,13 @@ def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_vaults( - backup_vault.ListBackupVaultsRequest(), + client.list_backups( + backup.ListBackupsRequest(), parent="parent_value", ) -def test_list_backup_vaults_rest_pager(transport: str = "rest"): +def test_list_backups_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31495,28 +34734,28 @@ def test_list_backup_vaults_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - backup_vault.ListBackupVaultsResponse( - backup_vaults=[ - backup_vault.BackupVault(), - backup_vault.BackupVault(), - backup_vault.BackupVault(), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), ], next_page_token="abc", ), - backup_vault.ListBackupVaultsResponse( - backup_vaults=[], + backup.ListBackupsResponse( + backups=[], next_page_token="def", ), - backup_vault.ListBackupVaultsResponse( - backup_vaults=[ - backup_vault.BackupVault(), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), ], next_page_token="ghi", ), - backup_vault.ListBackupVaultsResponse( - backup_vaults=[ - backup_vault.BackupVault(), - backup_vault.BackupVault(), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), ], ), ) @@ -31524,29 +34763,29 @@ def test_list_backup_vaults_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple( - backup_vault.ListBackupVaultsResponse.to_json(x) for x in response - ) + response = tuple(backup.ListBackupsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } - pager = client.list_backup_vaults(request=sample_request) + pager = client.list_backups(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, backup_vault.BackupVault) for i in results) + assert all(isinstance(i, backup.Backup) for i in results) - pages = list(client.list_backup_vaults(request=sample_request).pages) + pages = list(client.list_backups(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_update_backup_vault_rest_use_cached_wrapped_rpc(): +def test_delete_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead 
of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31560,21 +34799,17 @@ def test_update_backup_vault_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_backup_vault in client._transport._wrapped_methods - ) + assert client._transport.delete_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_backup_vault - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc request = {} - client.update_backup_vault(request) + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -31583,19 +34818,18 @@ def test_update_backup_vault_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup_vault(request) + client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_backup_vault_rest_required_fields( - request_type=gcn_backup_vault.UpdateBackupVaultRequest, -): +def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): transport_class = transports.NetAppRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -31606,19 +34840,21 @@ def test_update_backup_vault_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup_vault._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -31639,10 +34875,9 @@ def test_update_backup_vault_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -31653,31 +34888,23 @@ def test_update_backup_vault_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_vault(request) + response = client.delete_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_backup_vault_rest_unset_required_fields(): +def test_delete_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "backupVault", - ) - ) - ) + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_update_backup_vault_rest_flattened(): +def test_delete_backup_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -31690,15 +34917,12 @@ def test_update_backup_vault_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "backup_vault": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" - } + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" } # get truthy value for each flattened field mock_args = dict( - backup_vault=gcn_backup_vault.BackupVault(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -31710,20 +34934,20 @@ def test_update_backup_vault_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_vault(**mock_args) + client.delete_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" + "%s/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}" % client.transport._host, args[1], ) -def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31732,14 +34956,13 @@ def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_backup_vault( - gcn_backup_vault.UpdateBackupVaultRequest(), - backup_vault=gcn_backup_vault.BackupVault(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_backup( + backup.DeleteBackupRequest(), + name="name_value", ) -def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): +def test_update_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31753,21 +34976,17 @@ def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_backup_vault in client._transport._wrapped_methods - ) + assert client._transport.update_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_backup_vault - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc request = {} - client.delete_backup_vault(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -31776,20 +34995,19 @@ def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup_vault(request) + client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_vault_rest_required_fields( - request_type=backup_vault.DeleteBackupVaultRequest, +def test_update_backup_rest_required_fields( + request_type=gcn_backup.UpdateBackupRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -31800,21 +35018,19 @@ def test_delete_backup_vault_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_vault._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_vault._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -31835,9 +35051,10 @@ def test_delete_backup_vault_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -31848,23 +35065,31 @@ def test_delete_backup_vault_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_vault(request) + response = client.update_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_vault_rest_unset_required_fields(): +def test_update_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "backup", + ) + ) + ) -def test_delete_backup_vault_rest_flattened(): +def test_update_backup_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -31877,12 +35102,15 @@ def test_delete_backup_vault_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -31894,20 +35122,20 @@ def test_delete_backup_vault_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_vault(**mock_args) + client.update_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/backups/*}" % client.transport._host, args[1], ) -def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_update_backup_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31916,13 +35144,14 @@ def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_backup_vault( - backup_vault.DeleteBackupVaultRequest(), - name="name_value", + client.update_backup( + gcn_backup.UpdateBackupRequest(), + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_backup_rest_use_cached_wrapped_rpc(): +def test_create_backup_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31936,17 +35165,21 @@ def test_create_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods + assert ( + client._transport.create_backup_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_backup_policy + ] = mock_rpc request = {} - client.create_backup(request) + client.create_backup_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -31955,21 +35188,21 @@ def test_create_backup_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_backup(request) + client.create_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_rest_required_fields( - request_type=gcn_backup.CreateBackupRequest, +def test_create_backup_policy_rest_required_fields( + request_type=gcn_backup_policy.CreateBackupPolicyRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["backup_id"] = "" + request_init["backup_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -31977,32 +35210,32 @@ def test_create_backup_rest_required_fields( ) # verify fields with default values are dropped - assert "backupId" not in jsonified_request + assert "backupPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup._get_unset_required_fields(jsonified_request) + ).create_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == request_init["backup_id"] + assert "backupPolicyId" in jsonified_request + assert jsonified_request["backupPolicyId"] == request_init["backup_policy_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["backupId"] = "backup_id_value" + jsonified_request["backupPolicyId"] = "backup_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup._get_unset_required_fields(jsonified_request) + ).create_backup_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("backup_id",)) + assert not set(unset_fields) - set(("backup_policy_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == "backup_id_value" + assert "backupPolicyId" in jsonified_request + assert jsonified_request["backupPolicyId"] == "backup_policy_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -32037,11 +35270,11 @@ def test_create_backup_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup(request) + response = client.create_backup_policy(request) expected_params = [ ( - "backupId", + "backupPolicyId", "", ), ("$alt", "json;enum-encoding=int"), @@ -32050,25 +35283,25 @@ def test_create_backup_rest_required_fields( assert expected_params == actual_params -def test_create_backup_rest_unset_required_fields(): +def test_create_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup._get_unset_required_fields({}) + unset_fields = transport.create_backup_policy._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("backupId",)) + set(("backupPolicyId",)) & set( ( "parent", - "backupId", - "backup", + "backupPolicy", + "backupPolicyId", ) ) ) -def test_create_backup_rest_flattened(): +def test_create_backup_policy_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -32080,15 +35313,13 @@ def test_create_backup_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup=gcn_backup.Backup(name="name_value"), - backup_id="backup_id_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", ) mock_args.update(sample_request) @@ -32100,20 +35331,20 @@ def test_create_backup_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup(**mock_args) + client.create_backup_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/backups" + "%s/v1/{parent=projects/*/locations/*}/backupPolicies" % client.transport._host, args[1], ) -def test_create_backup_rest_flattened_error(transport: str = "rest"): +def test_create_backup_policy_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32122,15 +35353,15 @@ def test_create_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_backup( - gcn_backup.CreateBackupRequest(), + client.create_backup_policy( + gcn_backup_policy.CreateBackupPolicyRequest(), parent="parent_value", - backup=gcn_backup.Backup(name="name_value"), - backup_id="backup_id_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", ) -def test_get_backup_rest_use_cached_wrapped_rpc(): +def test_get_backup_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -32144,29 +35375,33 @@ def test_get_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods + assert client._transport.get_backup_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_backup_policy + ] = mock_rpc request = {} - client.get_backup(request) + client.get_backup_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup(request) + client.get_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): +def test_get_backup_policy_rest_required_fields( + request_type=backup_policy.GetBackupPolicyRequest, +): transport_class = transports.NetAppRestTransport request_init = {} @@ -32181,7 +35416,7 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).get_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -32190,7 +35425,7 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).get_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -32204,7 +35439,7 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup.Backup() + return_value = backup_policy.BackupPolicy() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -32225,30 +35460,30 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) + return_value = backup_policy.BackupPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) + response = client.get_backup_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_rest_unset_required_fields(): +def test_get_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup._get_unset_required_fields({}) + unset_fields = transport.get_backup_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_backup_rest_flattened(): +def test_get_backup_policy_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -32257,11 +35492,11 @@ def test_get_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.Backup() + return_value = backup_policy.BackupPolicy() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" } # get truthy value for each flattened field @@ -32274,26 +35509,26 @@ def test_get_backup_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) + return_value = backup_policy.BackupPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(**mock_args) + client.get_backup_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}" + "%s/v1/{name=projects/*/locations/*/backupPolicies/*}" % client.transport._host, args[1], ) -def test_get_backup_rest_flattened_error(transport: str = "rest"): +def test_get_backup_policy_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32302,13 +35537,13 @@ def test_get_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_backup( - backup.GetBackupRequest(), + client.get_backup_policy( + backup_policy.GetBackupPolicyRequest(), name="name_value", ) -def test_list_backups_rest_use_cached_wrapped_rpc(): +def test_list_backup_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -32322,29 +35557,35 @@ def test_list_backups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods + assert ( + client._transport.list_backup_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_backup_policies + ] = mock_rpc request = {} - client.list_backups(request) + client.list_backup_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_backups(request) + client.list_backup_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): +def test_list_backup_policies_rest_required_fields( + request_type=backup_policy.ListBackupPoliciesRequest, +): transport_class = transports.NetAppRestTransport request_init = {} @@ -32359,7 +35600,7 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).list_backup_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -32368,7 +35609,7 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).list_backup_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -32391,7 +35632,7 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse() + return_value = backup_policy.ListBackupPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -32412,26 +35653,26 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) + return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) + response = client.list_backup_policies(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backups_rest_unset_required_fields(): +def test_list_backup_policies_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backups._get_unset_required_fields({}) + unset_fields = transport.list_backup_policies._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -32445,7 +35686,7 @@ def test_list_backups_rest_unset_required_fields(): ) -def test_list_backups_rest_flattened(): +def test_list_backup_policies_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -32454,12 +35695,10 @@ def test_list_backups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse() + return_value = backup_policy.ListBackupPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -32471,26 +35710,26 @@ def test_list_backups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) + return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(**mock_args) + client.list_backup_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/backups" + "%s/v1/{parent=projects/*/locations/*}/backupPolicies" % client.transport._host, args[1], ) -def test_list_backups_rest_flattened_error(transport: str = "rest"): +def test_list_backup_policies_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32499,13 +35738,13 @@ def test_list_backups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backups( - backup.ListBackupsRequest(), + client.list_backup_policies( + backup_policy.ListBackupPoliciesRequest(), parent="parent_value", ) -def test_list_backups_rest_pager(transport: str = "rest"): +def test_list_backup_policies_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32517,28 +35756,28 @@ def test_list_backups_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), ], next_page_token="abc", ), - backup.ListBackupsResponse( - backups=[], + backup_policy.ListBackupPoliciesResponse( + backup_policies=[], next_page_token="def", ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), ], next_page_token="ghi", ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), ], ), ) @@ -32546,29 +35785,29 @@ def test_list_backups_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(backup.ListBackupsResponse.to_json(x) for x in response) + response = tuple( + backup_policy.ListBackupPoliciesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_backups(request=sample_request) + pager = client.list_backup_policies(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, backup.Backup) for i in results) + assert all(isinstance(i, backup_policy.BackupPolicy) for i in results) - pages = list(client.list_backups(request=sample_request).pages) + pages = list(client.list_backup_policies(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_delete_backup_rest_use_cached_wrapped_rpc(): +def test_update_backup_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to 
create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -32582,17 +35821,21 @@ def test_delete_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods + assert ( + client._transport.update_backup_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_backup_policy + ] = mock_rpc request = {} - client.delete_backup(request) + client.update_backup_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -32601,18 +35844,19 @@ def test_delete_backup_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup(request) + client.update_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): +def test_update_backup_policy_rest_required_fields( + request_type=gcn_backup_policy.UpdateBackupPolicyRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -32623,21 +35867,19 @@ def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).update_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).update_backup_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -32658,9 +35900,10 @@ def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -32671,23 +35914,31 @@ def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequ req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) + response = client.update_backup_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_rest_unset_required_fields(): +def test_update_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_backup_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "backupPolicy", + ) + ) + ) -def test_delete_backup_rest_flattened(): +def test_update_backup_policy_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -32700,12 +35951,15 @@ def test_delete_backup_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + "backup_policy": { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -32717,20 +35971,20 @@ def test_delete_backup_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(**mock_args) + client.update_backup_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}" + "%s/v1/{backup_policy.name=projects/*/locations/*/backupPolicies/*}" % client.transport._host, args[1], ) -def test_delete_backup_rest_flattened_error(transport: str = "rest"): +def test_update_backup_policy_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32739,13 +35993,14 @@ def test_delete_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_backup( - backup.DeleteBackupRequest(), - name="name_value", + client.update_backup_policy( + gcn_backup_policy.UpdateBackupPolicyRequest(), + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_backup_rest_use_cached_wrapped_rpc(): +def test_delete_backup_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -32759,17 +36014,21 @@ def test_update_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods + assert ( + client._transport.delete_backup_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_backup_policy + ] = mock_rpc request = {} - client.update_backup(request) + client.delete_backup_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -32778,19 +36037,20 @@ def test_update_backup_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup(request) + client.delete_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_backup_rest_required_fields( - request_type=gcn_backup.UpdateBackupRequest, +def test_delete_backup_policy_rest_required_fields( + request_type=backup_policy.DeleteBackupPolicyRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -32801,19 +36061,21 @@ def test_update_backup_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) + ).delete_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).delete_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -32834,10 +36096,9 @@ def test_update_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -32848,31 +36109,23 @@ def test_update_backup_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup(request) + response = client.delete_backup_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_backup_rest_unset_required_fields(): +def test_delete_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_backup._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "backup", - ) - ) - ) + unset_fields = transport.delete_backup_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_update_backup_rest_flattened(): +def test_delete_backup_policy_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -32885,15 +36138,12 @@ def test_update_backup_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "backup": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" - } + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" } # get truthy value for each flattened field mock_args = dict( - backup=gcn_backup.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -32905,20 +36155,20 @@ def test_update_backup_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup(**mock_args) + client.delete_backup_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/backups/*}" + "%s/v1/{name=projects/*/locations/*/backupPolicies/*}" % client.transport._host, args[1], ) -def test_update_backup_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_policy_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32927,14 +36177,13 @@ def test_update_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_backup( - gcn_backup.UpdateBackupRequest(), - backup=gcn_backup.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_backup_policy( + backup_policy.DeleteBackupPolicyRequest(), + name="name_value", ) -def test_create_backup_policy_rest_use_cached_wrapped_rpc(): +def test_list_quota_rules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -32948,9 +36197,7 @@ def test_create_backup_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_backup_policy in client._transport._wrapped_methods - ) + assert client._transport.list_quota_rules in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -32958,34 +36205,29 @@ def test_create_backup_policy_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_backup_policy + client._transport.list_quota_rules ] = mock_rpc request = {} - client.create_backup_policy(request) + client.list_quota_rules(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_policy(request) + client.list_quota_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_policy_rest_required_fields( - request_type=gcn_backup_policy.CreateBackupPolicyRequest, +def test_list_quota_rules_rest_required_fields( + request_type=quota_rule.ListQuotaRulesRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["backup_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -32993,32 +36235,33 @@ def test_create_backup_policy_rest_required_fields( ) # verify fields with default values are dropped - assert "backupPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_policy._get_unset_required_fields(jsonified_request) + ).list_quota_rules._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "backupPolicyId" in jsonified_request - assert jsonified_request["backupPolicyId"] == request_init["backup_policy_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["backupPolicyId"] = "backup_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_policy._get_unset_required_fields(jsonified_request) + ).list_quota_rules._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("backup_policy_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "backupPolicyId" in jsonified_request - assert jsonified_request["backupPolicyId"] == "backup_policy_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -33027,7 +36270,7 @@ def test_create_backup_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = quota_rule.ListQuotaRulesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -33039,52 +36282,49 @@ def test_create_backup_policy_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = quota_rule.ListQuotaRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_policy(request) + response = client.list_quota_rules(request) - expected_params = [ - ( - "backupPolicyId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_backup_policy_rest_unset_required_fields(): +def test_list_quota_rules_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup_policy._get_unset_required_fields({}) + unset_fields = transport.list_quota_rules._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("backupPolicyId",)) - & set( + set( ( - "parent", - "backupPolicy", - "backupPolicyId", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_create_backup_policy_rest_flattened(): +def test_list_quota_rules_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -33093,41 +36333,43 @@ def test_create_backup_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = quota_rule.ListQuotaRulesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), - backup_policy_id="backup_policy_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = quota_rule.ListQuotaRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_policy(**mock_args) + client.list_quota_rules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupPolicies" + "%s/v1/{parent=projects/*/locations/*/volumes/*}/quotaRules" % client.transport._host, args[1], ) -def test_create_backup_policy_rest_flattened_error(transport: str = "rest"): +def test_list_quota_rules_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -33136,15 +36378,76 @@ def test_create_backup_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup_policy( - gcn_backup_policy.CreateBackupPolicyRequest(), + client.list_quota_rules( + quota_rule.ListQuotaRulesRequest(), parent="parent_value", - backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), - backup_policy_id="backup_policy_id_value", ) -def test_get_backup_policy_rest_use_cached_wrapped_rpc(): +def test_list_quota_rules_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + quota_rule.ListQuotaRulesResponse( + quota_rules=[ + quota_rule.QuotaRule(), + quota_rule.QuotaRule(), + quota_rule.QuotaRule(), + ], + next_page_token="abc", + ), + quota_rule.ListQuotaRulesResponse( + quota_rules=[], + next_page_token="def", + ), + quota_rule.ListQuotaRulesResponse( + quota_rules=[ + quota_rule.QuotaRule(), + ], + next_page_token="ghi", + ), + quota_rule.ListQuotaRulesResponse( + quota_rules=[ + quota_rule.QuotaRule(), + quota_rule.QuotaRule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(quota_rule.ListQuotaRulesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } + + pager = client.list_quota_rules(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, quota_rule.QuotaRule) for i in results) + + pages = list(client.list_quota_rules(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_quota_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -33158,32 +36461,30 @@ def test_get_backup_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup_policy in client._transport._wrapped_methods + assert client._transport.get_quota_rule in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_backup_policy - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_quota_rule] = mock_rpc request = {} - client.get_backup_policy(request) + client.get_quota_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_backup_policy(request) + client.get_quota_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_policy_rest_required_fields( - request_type=backup_policy.GetBackupPolicyRequest, +def test_get_quota_rule_rest_required_fields( + request_type=quota_rule.GetQuotaRuleRequest, ): transport_class = transports.NetAppRestTransport @@ -33199,7 +36500,7 @@ def test_get_backup_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_policy._get_unset_required_fields(jsonified_request) + ).get_quota_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -33208,7 +36509,7 @@ def test_get_backup_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_policy._get_unset_required_fields(jsonified_request) + ).get_quota_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -33222,7 +36523,7 @@ def test_get_backup_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup_policy.BackupPolicy() + return_value = quota_rule.QuotaRule() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -33243,30 +36544,30 @@ def test_get_backup_policy_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup_policy.BackupPolicy.pb(return_value) + return_value = quota_rule.QuotaRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_policy(request) + response = client.get_quota_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_policy_rest_unset_required_fields(): +def test_get_quota_rule_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup_policy._get_unset_required_fields({}) + unset_fields = transport.get_quota_rule._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_backup_policy_rest_flattened(): +def test_get_quota_rule_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -33275,11 +36576,11 @@ def test_get_backup_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backup_policy.BackupPolicy() + return_value = quota_rule.QuotaRule() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" } # get truthy value for each flattened field @@ -33292,26 +36593,26 @@ def test_get_backup_policy_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup_policy.BackupPolicy.pb(return_value) + return_value = quota_rule.QuotaRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_policy(**mock_args) + client.get_quota_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupPolicies/*}" + "%s/v1/{name=projects/*/locations/*/volumes/*/quotaRules/*}" % client.transport._host, args[1], ) -def test_get_backup_policy_rest_flattened_error(transport: str = "rest"): +def test_get_quota_rule_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -33320,13 +36621,13 @@ def test_get_backup_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup_policy( - backup_policy.GetBackupPolicyRequest(), + client.get_quota_rule( + quota_rule.GetQuotaRuleRequest(), name="name_value", ) -def test_list_backup_policies_rest_use_cached_wrapped_rpc(): +def test_create_quota_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -33340,9 +36641,7 @@ def test_list_backup_policies_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_backup_policies in client._transport._wrapped_methods - ) + assert client._transport.create_quota_rule in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -33350,29 +36649,34 @@ def test_list_backup_policies_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_policies + client._transport.create_quota_rule ] = mock_rpc request = {} - client.list_backup_policies(request) + client.create_quota_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_policies(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_quota_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backup_policies_rest_required_fields( - request_type=backup_policy.ListBackupPoliciesRequest, +def test_create_quota_rule_rest_required_fields( + request_type=gcn_quota_rule.CreateQuotaRuleRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" + request_init["quota_rule_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -33380,33 +36684,32 @@ def test_list_backup_policies_rest_required_fields( ) # verify fields with default values are dropped + assert "quotaRuleId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_policies._get_unset_required_fields(jsonified_request) + ).create_quota_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "quotaRuleId" in jsonified_request + assert jsonified_request["quotaRuleId"] == request_init["quota_rule_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["quotaRuleId"] = "quota_rule_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_policies._get_unset_required_fields(jsonified_request) + ).create_quota_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("quota_rule_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "quotaRuleId" in jsonified_request + assert jsonified_request["quotaRuleId"] == "quota_rule_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -33415,7 +36718,7 @@ def test_list_backup_policies_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup_policy.ListBackupPoliciesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -33427,49 +36730,52 @@ def test_list_backup_policies_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_policies(request) + response = client.create_quota_rule(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "quotaRuleId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backup_policies_rest_unset_required_fields(): +def test_create_quota_rule_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backup_policies._get_unset_required_fields({}) + unset_fields = transport.create_quota_rule._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("quotaRuleId",)) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "parent", + "quotaRule", + "quotaRuleId", ) ) - & set(("parent",)) ) -def test_list_backup_policies_rest_flattened(): +def test_create_quota_rule_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -33478,41 +36784,43 @@ def test_list_backup_policies_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup_policy.ListBackupPoliciesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", + quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), + quota_rule_id="quota_rule_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_policies(**mock_args) + client.create_quota_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupPolicies" + "%s/v1/{parent=projects/*/locations/*/volumes/*}/quotaRules" % client.transport._host, args[1], ) -def test_list_backup_policies_rest_flattened_error(transport: str = "rest"): +def test_create_quota_rule_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -33521,76 +36829,15 @@ def test_list_backup_policies_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_policies( - backup_policy.ListBackupPoliciesRequest(), + client.create_quota_rule( + gcn_quota_rule.CreateQuotaRuleRequest(), parent="parent_value", + quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), + quota_rule_id="quota_rule_id_value", ) -def test_list_backup_policies_rest_pager(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backup_policy.ListBackupPoliciesResponse( - backup_policies=[ - backup_policy.BackupPolicy(), - backup_policy.BackupPolicy(), - backup_policy.BackupPolicy(), - ], - next_page_token="abc", - ), - backup_policy.ListBackupPoliciesResponse( - backup_policies=[], - next_page_token="def", - ), - backup_policy.ListBackupPoliciesResponse( - backup_policies=[ - backup_policy.BackupPolicy(), - ], - next_page_token="ghi", - ), - backup_policy.ListBackupPoliciesResponse( - backup_policies=[ - backup_policy.BackupPolicy(), - backup_policy.BackupPolicy(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backup_policy.ListBackupPoliciesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_backup_policies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup_policy.BackupPolicy) for i in results) - - pages = list(client.list_backup_policies(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_update_backup_policy_rest_use_cached_wrapped_rpc(): +def test_update_quota_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -33604,9 +36851,7 @@ def test_update_backup_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_backup_policy in client._transport._wrapped_methods - ) + assert client._transport.update_quota_rule in 
client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -33614,11 +36859,11 @@ def test_update_backup_policy_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_backup_policy + client._transport.update_quota_rule ] = mock_rpc request = {} - client.update_backup_policy(request) + client.update_quota_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -33627,15 +36872,15 @@ def test_update_backup_policy_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup_policy(request) + client.update_quota_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_backup_policy_rest_required_fields( - request_type=gcn_backup_policy.UpdateBackupPolicyRequest, +def test_update_quota_rule_rest_required_fields( + request_type=gcn_quota_rule.UpdateQuotaRuleRequest, ): transport_class = transports.NetAppRestTransport @@ -33650,14 +36895,14 @@ def test_update_backup_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup_policy._get_unset_required_fields(jsonified_request) + ).update_quota_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup_policy._get_unset_required_fields(jsonified_request) + ).update_quota_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -33697,31 +36942,23 @@ def test_update_backup_policy_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_policy(request) + response = client.update_quota_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_backup_policy_rest_unset_required_fields(): +def test_update_quota_rule_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_backup_policy._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "backupPolicy", - ) - ) - ) + unset_fields = transport.update_quota_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("quotaRule",))) -def test_update_backup_policy_rest_flattened(): +def test_update_quota_rule_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -33734,14 +36971,14 @@ def test_update_backup_policy_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "backup_policy": { - "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + "quota_rule": { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" } } # get truthy value for each flattened field mock_args = dict( - backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -33754,20 +36991,20 @@ def test_update_backup_policy_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_policy(**mock_args) + client.update_quota_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{backup_policy.name=projects/*/locations/*/backupPolicies/*}" + "%s/v1/{quota_rule.name=projects/*/locations/*/volumes/*/quotaRules/*}" % client.transport._host, args[1], ) -def test_update_backup_policy_rest_flattened_error(transport: str = "rest"): +def test_update_quota_rule_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -33776,14 +37013,14 @@ def test_update_backup_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_backup_policy( - gcn_backup_policy.UpdateBackupPolicyRequest(), - backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + client.update_quota_rule( + gcn_quota_rule.UpdateQuotaRuleRequest(), + quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_backup_policy_rest_use_cached_wrapped_rpc(): +def test_delete_quota_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -33797,9 +37034,7 @@ def test_delete_backup_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_backup_policy in client._transport._wrapped_methods - ) + assert client._transport.delete_quota_rule in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -33807,11 +37042,11 @@ def test_delete_backup_policy_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_backup_policy + client._transport.delete_quota_rule ] = mock_rpc request = {} - client.delete_backup_policy(request) + client.delete_quota_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -33820,15 +37055,15 @@ def test_delete_backup_policy_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup_policy(request) + client.delete_quota_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_policy_rest_required_fields( - request_type=backup_policy.DeleteBackupPolicyRequest, +def test_delete_quota_rule_rest_required_fields( + request_type=quota_rule.DeleteQuotaRuleRequest, ): transport_class = transports.NetAppRestTransport @@ -33844,7 +37079,7 @@ def test_delete_backup_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_policy._get_unset_required_fields(jsonified_request) + ).delete_quota_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -33853,7 +37088,7 @@ def test_delete_backup_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_policy._get_unset_required_fields(jsonified_request) + ).delete_quota_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -33892,23 +37127,23 @@ def test_delete_backup_policy_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_policy(request) + response = client.delete_quota_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_policy_rest_unset_required_fields(): +def test_delete_quota_rule_rest_unset_required_fields(): transport = transports.NetAppRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup_policy._get_unset_required_fields({}) + unset_fields = transport.delete_quota_rule._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_backup_policy_rest_flattened(): +def test_delete_quota_rule_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -33921,7 +37156,7 @@ def test_delete_backup_policy_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" } # get truthy value for each flattened field @@ -33938,20 +37173,20 @@ def test_delete_backup_policy_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_policy(**mock_args) + client.delete_quota_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupPolicies/*}" + "%s/v1/{name=projects/*/locations/*/volumes/*/quotaRules/*}" % client.transport._host, args[1], ) -def test_delete_backup_policy_rest_flattened_error(transport: str = "rest"): +def test_delete_quota_rule_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -33960,13 +37195,13 @@ def test_delete_backup_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup_policy( - backup_policy.DeleteBackupPolicyRequest(), + client.delete_quota_rule( + quota_rule.DeleteQuotaRuleRequest(), name="name_value", ) -def test_list_quota_rules_rest_use_cached_wrapped_rpc(): +def test_restore_backup_files_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -33980,7 +37215,9 @@ def test_list_quota_rules_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_quota_rules in client._transport._wrapped_methods + assert ( + client._transport.restore_backup_files in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -33988,24 +37225,165 @@ def test_list_quota_rules_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_quota_rules + client._transport.restore_backup_files ] = mock_rpc request = {} - client.list_quota_rules(request) + client.restore_backup_files(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_quota_rules(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_quota_rules_rest_required_fields( - request_type=quota_rule.ListQuotaRulesRequest, +def test_restore_backup_files_rest_required_fields( + request_type=volume.RestoreBackupFilesRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request_init["backup"] = "" + request_init["file_list"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup_files._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["backup"] = "backup_value" + jsonified_request["fileList"] = "file_list_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup_files._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "backup" in jsonified_request + assert jsonified_request["backup"] == "backup_value" + assert "fileList" in jsonified_request + assert jsonified_request["fileList"] == "file_list_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.restore_backup_files(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_backup_files_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_backup_files._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "backup", + "fileList", + ) + ) + ) + + +def test_list_host_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_host_groups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_host_groups + ] = mock_rpc + + request = {} + client.list_host_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_host_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_host_groups_rest_required_fields( + request_type=host_group.ListHostGroupsRequest, ): transport_class = transports.NetAppRestTransport @@ -34021,7 +37399,7 @@ def test_list_quota_rules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_quota_rules._get_unset_required_fields(jsonified_request) + ).list_host_groups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -34030,7 +37408,7 @@ def test_list_quota_rules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_quota_rules._get_unset_required_fields(jsonified_request) + ).list_host_groups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -34053,7 +37431,7 @@ def test_list_quota_rules_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = quota_rule.ListQuotaRulesResponse() + return_value = host_group.ListHostGroupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -34074,26 +37452,26 @@ def test_list_quota_rules_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = quota_rule.ListQuotaRulesResponse.pb(return_value) + return_value = host_group.ListHostGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_quota_rules(request) + response = client.list_host_groups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_quota_rules_rest_unset_required_fields(): +def test_list_host_groups_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_quota_rules._get_unset_required_fields({}) + unset_fields = transport.list_host_groups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -34107,7 +37485,7 @@ def test_list_quota_rules_rest_unset_required_fields(): ) -def test_list_quota_rules_rest_flattened(): +def test_list_host_groups_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -34116,12 +37494,10 @@ def test_list_quota_rules_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = quota_rule.ListQuotaRulesResponse() + return_value = host_group.ListHostGroupsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -34133,26 +37509,25 @@ def test_list_quota_rules_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = quota_rule.ListQuotaRulesResponse.pb(return_value) + return_value = host_group.ListHostGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_quota_rules(**mock_args) + client.list_host_groups(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/quotaRules" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/hostGroups" % client.transport._host, args[1], ) -def test_list_quota_rules_rest_flattened_error(transport: str = "rest"): +def test_list_host_groups_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34161,13 +37536,13 @@ def test_list_quota_rules_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_quota_rules( - quota_rule.ListQuotaRulesRequest(), + client.list_host_groups( + host_group.ListHostGroupsRequest(), parent="parent_value", ) -def test_list_quota_rules_rest_pager(transport: str = "rest"): +def test_list_host_groups_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34179,28 +37554,28 @@ def test_list_quota_rules_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - quota_rule.ListQuotaRulesResponse( - quota_rules=[ - quota_rule.QuotaRule(), - quota_rule.QuotaRule(), - quota_rule.QuotaRule(), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), + host_group.HostGroup(), ], next_page_token="abc", ), - quota_rule.ListQuotaRulesResponse( - quota_rules=[], + host_group.ListHostGroupsResponse( + host_groups=[], next_page_token="def", ), - quota_rule.ListQuotaRulesResponse( - quota_rules=[ - quota_rule.QuotaRule(), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), ], next_page_token="ghi", ), - quota_rule.ListQuotaRulesResponse( - quota_rules=[ - quota_rule.QuotaRule(), - quota_rule.QuotaRule(), + host_group.ListHostGroupsResponse( + host_groups=[ + host_group.HostGroup(), + host_group.HostGroup(), ], ), ) @@ -34208,29 +37583,27 @@ def test_list_quota_rules_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(quota_rule.ListQuotaRulesResponse.to_json(x) for x in response) + response = tuple(host_group.ListHostGroupsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_quota_rules(request=sample_request) + pager = client.list_host_groups(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, quota_rule.QuotaRule) for i in results) + assert all(isinstance(i, host_group.HostGroup) for i in results) - pages = list(client.list_quota_rules(request=sample_request).pages) + pages = list(client.list_host_groups(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_quota_rule_rest_use_cached_wrapped_rpc(): +def test_get_host_group_rest_use_cached_wrapped_rpc(): # Clients should use 
_prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -34244,30 +37617,30 @@ def test_get_quota_rule_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_quota_rule in client._transport._wrapped_methods + assert client._transport.get_host_group in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_quota_rule] = mock_rpc + client._transport._wrapped_methods[client._transport.get_host_group] = mock_rpc request = {} - client.get_quota_rule(request) + client.get_host_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_quota_rule(request) + client.get_host_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_quota_rule_rest_required_fields( - request_type=quota_rule.GetQuotaRuleRequest, +def test_get_host_group_rest_required_fields( + request_type=host_group.GetHostGroupRequest, ): transport_class = transports.NetAppRestTransport @@ -34283,7 +37656,7 @@ def test_get_quota_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_quota_rule._get_unset_required_fields(jsonified_request) + ).get_host_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -34292,7 +37665,7 @@ def test_get_quota_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_quota_rule._get_unset_required_fields(jsonified_request) + ).get_host_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -34306,7 +37679,7 @@ def test_get_quota_rule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = quota_rule.QuotaRule() + return_value = host_group.HostGroup() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -34327,30 +37700,30 @@ def test_get_quota_rule_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = quota_rule.QuotaRule.pb(return_value) + return_value = host_group.HostGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_quota_rule(request) + response = client.get_host_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_quota_rule_rest_unset_required_fields(): +def test_get_host_group_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_quota_rule._get_unset_required_fields({}) + unset_fields = transport.get_host_group._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_quota_rule_rest_flattened(): +def test_get_host_group_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -34359,11 +37732,11 @@ def test_get_quota_rule_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = quota_rule.QuotaRule() + return_value = host_group.HostGroup() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + "name": "projects/sample1/locations/sample2/hostGroups/sample3" } # get truthy value for each flattened field @@ -34376,26 +37749,25 @@ def test_get_quota_rule_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = quota_rule.QuotaRule.pb(return_value) + return_value = host_group.HostGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_quota_rule(**mock_args) + client.get_host_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/quotaRules/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/hostGroups/*}" % client.transport._host, args[1], ) -def test_get_quota_rule_rest_flattened_error(transport: str = "rest"): +def test_get_host_group_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34404,13 +37776,13 @@ def test_get_quota_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_quota_rule( - quota_rule.GetQuotaRuleRequest(), + client.get_host_group( + host_group.GetHostGroupRequest(), name="name_value", ) -def test_create_quota_rule_rest_use_cached_wrapped_rpc(): +def test_create_host_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -34424,7 +37796,7 @@ def test_create_quota_rule_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_quota_rule in client._transport._wrapped_methods + assert client._transport.create_host_group in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -34432,11 +37804,11 @@ def test_create_quota_rule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_quota_rule + client._transport.create_host_group ] = mock_rpc request = {} - client.create_quota_rule(request) + client.create_host_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -34445,21 +37817,21 @@ def test_create_quota_rule_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_quota_rule(request) + client.create_host_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_quota_rule_rest_required_fields( - request_type=gcn_quota_rule.CreateQuotaRuleRequest, +def test_create_host_group_rest_required_fields( + request_type=gcn_host_group.CreateHostGroupRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["quota_rule_id"] = "" + request_init["host_group_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -34467,32 +37839,32 @@ def test_create_quota_rule_rest_required_fields( ) # verify fields with default values are dropped - assert "quotaRuleId" not in jsonified_request + assert "hostGroupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_quota_rule._get_unset_required_fields(jsonified_request) + ).create_host_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "quotaRuleId" in jsonified_request - assert jsonified_request["quotaRuleId"] == request_init["quota_rule_id"] + assert "hostGroupId" in jsonified_request + assert jsonified_request["hostGroupId"] == request_init["host_group_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["quotaRuleId"] = "quota_rule_id_value" + jsonified_request["hostGroupId"] = "host_group_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_quota_rule._get_unset_required_fields(jsonified_request) + ).create_host_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("quota_rule_id",)) + assert not set(unset_fields) - set(("host_group_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "quotaRuleId" in jsonified_request - assert jsonified_request["quotaRuleId"] == "quota_rule_id_value" + assert "hostGroupId" in jsonified_request + assert jsonified_request["hostGroupId"] == "host_group_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -34527,11 +37899,11 @@ def test_create_quota_rule_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_quota_rule(request) + response = client.create_host_group(request) expected_params = [ ( - "quotaRuleId", + "hostGroupId", "", ), ("$alt", "json;enum-encoding=int"), @@ -34540,25 +37912,25 @@ def test_create_quota_rule_rest_required_fields( assert expected_params == actual_params -def test_create_quota_rule_rest_unset_required_fields(): +def test_create_host_group_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_quota_rule._get_unset_required_fields({}) + unset_fields = transport.create_host_group._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("quotaRuleId",)) + set(("hostGroupId",)) & set( ( "parent", - "quotaRule", - "quotaRuleId", + "hostGroup", + "hostGroupId", ) ) ) -def test_create_quota_rule_rest_flattened(): +def test_create_host_group_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -34570,15 +37942,13 @@ def test_create_quota_rule_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), - quota_rule_id="quota_rule_id_value", + host_group=gcn_host_group.HostGroup(name="name_value"), + host_group_id="host_group_id_value", ) mock_args.update(sample_request) @@ -34590,20 +37960,19 @@ def test_create_quota_rule_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_quota_rule(**mock_args) + client.create_host_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/quotaRules" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/hostGroups" % client.transport._host, args[1], ) -def test_create_quota_rule_rest_flattened_error(transport: str = "rest"): +def test_create_host_group_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34612,15 +37981,15 @@ def test_create_quota_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_quota_rule( - gcn_quota_rule.CreateQuotaRuleRequest(), + client.create_host_group( + gcn_host_group.CreateHostGroupRequest(), parent="parent_value", - quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), - quota_rule_id="quota_rule_id_value", + host_group=gcn_host_group.HostGroup(name="name_value"), + host_group_id="host_group_id_value", ) -def test_update_quota_rule_rest_use_cached_wrapped_rpc(): +def test_update_host_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -34634,7 +38003,7 @@ def test_update_quota_rule_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_quota_rule in client._transport._wrapped_methods + assert client._transport.update_host_group in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -34642,11 +38011,11 @@ def test_update_quota_rule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_quota_rule + client._transport.update_host_group ] = mock_rpc request = {} - client.update_quota_rule(request) + client.update_host_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -34655,15 +38024,15 @@ def test_update_quota_rule_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_quota_rule(request) + client.update_host_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_quota_rule_rest_required_fields( - request_type=gcn_quota_rule.UpdateQuotaRuleRequest, +def test_update_host_group_rest_required_fields( + request_type=gcn_host_group.UpdateHostGroupRequest, ): transport_class = transports.NetAppRestTransport @@ -34678,14 +38047,14 @@ def test_update_quota_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_quota_rule._get_unset_required_fields(jsonified_request) + ).update_host_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_quota_rule._get_unset_required_fields(jsonified_request) + ).update_host_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -34725,23 +38094,23 @@ def test_update_quota_rule_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_quota_rule(request) + response = client.update_host_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_quota_rule_rest_unset_required_fields(): +def test_update_host_group_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_quota_rule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("quotaRule",))) + unset_fields = transport.update_host_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("hostGroup",))) -def test_update_quota_rule_rest_flattened(): +def test_update_host_group_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -34754,14 +38123,14 @@ def test_update_quota_rule_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "quota_rule": { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + "host_group": { + "name": "projects/sample1/locations/sample2/hostGroups/sample3" } } # get truthy value for each flattened field mock_args = dict( - quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), + host_group=gcn_host_group.HostGroup(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -34774,20 +38143,20 @@ def test_update_quota_rule_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_quota_rule(**mock_args) + client.update_host_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{quota_rule.name=projects/*/locations/*/volumes/*/quotaRules/*}" + "%s/v1/{host_group.name=projects/*/locations/*/hostGroups/*}" % client.transport._host, args[1], ) -def test_update_quota_rule_rest_flattened_error(transport: str = "rest"): +def test_update_host_group_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34796,14 +38165,14 @@ def test_update_quota_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_quota_rule( - gcn_quota_rule.UpdateQuotaRuleRequest(), - quota_rule=gcn_quota_rule.QuotaRule(name="name_value"), + client.update_host_group( + gcn_host_group.UpdateHostGroupRequest(), + host_group=gcn_host_group.HostGroup(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_quota_rule_rest_use_cached_wrapped_rpc(): +def test_delete_host_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -34817,7 +38186,7 @@ def test_delete_quota_rule_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_quota_rule in client._transport._wrapped_methods + assert client._transport.delete_host_group in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -34825,11 +38194,11 @@ def test_delete_quota_rule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_quota_rule + client._transport.delete_host_group ] = mock_rpc request = {} - client.delete_quota_rule(request) + client.delete_host_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -34838,15 +38207,15 @@ def test_delete_quota_rule_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_quota_rule(request) + client.delete_host_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_quota_rule_rest_required_fields( - request_type=quota_rule.DeleteQuotaRuleRequest, +def test_delete_host_group_rest_required_fields( + request_type=host_group.DeleteHostGroupRequest, ): transport_class = transports.NetAppRestTransport @@ -34862,7 +38231,7 @@ def test_delete_quota_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_quota_rule._get_unset_required_fields(jsonified_request) + ).delete_host_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -34871,7 +38240,7 @@ def test_delete_quota_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_quota_rule._get_unset_required_fields(jsonified_request) + ).delete_host_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -34910,23 +38279,23 @@ def test_delete_quota_rule_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_quota_rule(request) + response = client.delete_host_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_quota_rule_rest_unset_required_fields(): +def test_delete_host_group_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.delete_quota_rule._get_unset_required_fields({}) + unset_fields = transport.delete_host_group._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_quota_rule_rest_flattened(): +def test_delete_host_group_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -34939,7 +38308,7 @@ def test_delete_quota_rule_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + "name": "projects/sample1/locations/sample2/hostGroups/sample3" } # get truthy value for each flattened field @@ -34956,20 +38325,19 @@ def test_delete_quota_rule_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_quota_rule(**mock_args) + client.delete_host_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/quotaRules/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/hostGroups/*}" % client.transport._host, args[1], ) -def test_delete_quota_rule_rest_flattened_error(transport: str = "rest"): +def test_delete_host_group_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34978,8 +38346,8 @@ def test_delete_quota_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_quota_rule( - quota_rule.DeleteQuotaRuleRequest(), + client.delete_host_group( + host_group.DeleteHostGroupRequest(), name="name_value", ) @@ -36418,6 +39786,140 @@ def test_delete_quota_rule_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_backup_files_empty_call_grpc(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_backup_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = volume.RestoreBackupFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_host_groups_empty_call_grpc(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + call.return_value = host_group.ListHostGroupsResponse() + client.list_host_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.ListHostGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_host_group_empty_call_grpc(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + call.return_value = host_group.HostGroup() + client.get_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.GetHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_host_group_empty_call_grpc(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_host_group.CreateHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_host_group_empty_call_grpc(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_host_group.UpdateHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_host_group_empty_call_grpc(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_host_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.DeleteHostGroupRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = NetAppAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -36532,6 +40034,7 @@ async def test_get_storage_pool_empty_call_grpc_asyncio(): available_throughput_mibps=0.2772, cold_tier_size_used_gib=2416, hot_tier_size_used_gib=2329, + type_=common.StoragePoolType.FILE, ) ) await client.get_storage_pool(request=None) @@ -37657,6 +41160,9 @@ async def test_get_backup_vault_empty_call_grpc_asyncio(): backup_region="backup_region_value", source_backup_vault="source_backup_vault_value", destination_backup_vault="destination_backup_vault_value", + kms_config="kms_config_value", + encryption_state=backup_vault.BackupVault.EncryptionState.ENCRYPTION_STATE_PENDING, + backups_crypto_key_version="backups_crypto_key_version_value", ) ) await client.get_backup_vault(request=None) @@ -38183,6 +41689,174 @@ async def test_delete_quota_rule_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_restore_backup_files_empty_call_grpc_asyncio(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.restore_backup_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = volume.RestoreBackupFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_host_groups_empty_call_grpc_asyncio(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.ListHostGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_host_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.ListHostGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_host_group_empty_call_grpc_asyncio(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + host_group.HostGroup( + name="name_value", + type_=host_group.HostGroup.Type.ISCSI_INITIATOR, + state=host_group.HostGroup.State.CREATING, + hosts=["hosts_value"], + os_type=common.OsType.LINUX, + description="description_value", + ) + ) + await client.get_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.GetHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_host_group_empty_call_grpc_asyncio(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_host_group.CreateHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_host_group_empty_call_grpc_asyncio(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_host_group.UpdateHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_host_group_empty_call_grpc_asyncio(): + client = NetAppAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_host_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.DeleteHostGroupRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = NetAppClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -38393,6 +42067,7 @@ def test_create_storage_pool_rest_call_success(request_type): "available_throughput_mibps": 0.2772, "cold_tier_size_used_gib": 2416, "hot_tier_size_used_gib": 2329, + "type_": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -38617,6 +42292,7 @@ def test_get_storage_pool_rest_call_success(request_type): available_throughput_mibps=0.2772, cold_tier_size_used_gib=2416, hot_tier_size_used_gib=2329, + type_=common.StoragePoolType.FILE, ) # Wrap the value into a proper Response obj @@ -38662,6 +42338,7 @@ def test_get_storage_pool_rest_call_success(request_type): assert math.isclose(response.available_throughput_mibps, 0.2772, rel_tol=1e-6) assert response.cold_tier_size_used_gib == 2416 assert response.hot_tier_size_used_gib == 2329 + assert response.type_ == common.StoragePoolType.FILE @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -38803,6 +42480,7 @@ def test_update_storage_pool_rest_call_success(request_type): "available_throughput_mibps": 0.2772, "cold_tier_size_used_gib": 2416, "hot_tier_size_used_gib": 2329, + "type_": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -39775,7 +43453,44 @@ def test_create_volume_rest_call_success(request_type): "large_volume_constituent_count": 3241, }, "throughput_mibps": 0.1748, + "cache_parameters": { + "peer_volume_name": "peer_volume_name_value", + "peer_cluster_name": "peer_cluster_name_value", + "peer_svm_name": "peer_svm_name_value", + "peer_ip_addresses": [ + "peer_ip_addresses_value1", + "peer_ip_addresses_value2", + ], + "enable_global_file_lock": True, + "cache_config": { + "cache_pre_populate": { + "path_list": ["path_list_value1", "path_list_value2"], + "exclude_path_list": [ + "exclude_path_list_value1", + "exclude_path_list_value2", + ], + "recursion": True, + }, + "writeback_enabled": True, + "cifs_change_notify_enabled": True, + "cache_pre_populate_state": 1, + }, + "cache_state": 1, + "command": "command_value", + "peering_command_expiry_time": {}, + "passphrase": "passphrase_value", + "state_details": "state_details_value", + }, "hot_tier_size_used_gib": 2329, + "block_devices": [ + { + "name": "name_value", + "host_groups": ["host_groups_value1", "host_groups_value2"], + "identifier": "identifier_value", + "size_gib": 844, + "os_type": 1, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -40081,7 +43796,44 @@ def test_update_volume_rest_call_success(request_type): "large_volume_constituent_count": 3241, }, "throughput_mibps": 0.1748, + "cache_parameters": { + "peer_volume_name": "peer_volume_name_value", + "peer_cluster_name": "peer_cluster_name_value", + "peer_svm_name": "peer_svm_name_value", + "peer_ip_addresses": [ + "peer_ip_addresses_value1", + "peer_ip_addresses_value2", + ], + "enable_global_file_lock": True, + "cache_config": { + "cache_pre_populate": { + "path_list": ["path_list_value1", "path_list_value2"], + "exclude_path_list": [ + "exclude_path_list_value1", + "exclude_path_list_value2", + ], + "recursion": True, + }, + "writeback_enabled": True, + "cifs_change_notify_enabled": True, + "cache_pre_populate_state": 1, + }, + "cache_state": 1, + "command": "command_value", + "peering_command_expiry_time": {}, + "passphrase": "passphrase_value", + "state_details": "state_details_value", + }, "hot_tier_size_used_gib": 2329, + "block_devices": [ + { + "name": "name_value", + "host_groups": ["host_groups_value1", "host_groups_value2"], + "identifier": "identifier_value", + "size_gib": 844, + "os_type": 1, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -44453,14 +48205,501 @@ def test_establish_peering_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.establish_peering(request) + response = client.establish_peering(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_establish_peering_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_establish_peering" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "post_establish_peering_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetAppRestInterceptor, "pre_establish_peering" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = replication.EstablishPeeringRequest.pb( + replication.EstablishPeeringRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = replication.EstablishPeeringRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.establish_peering( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_sync_replication_rest_bad_request( + request_type=replication.SyncReplicationRequest, +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.sync_replication(request) + + +@pytest.mark.parametrize( + "request_type", + [ + replication.SyncReplicationRequest, + dict, + ], +) +def test_sync_replication_rest_call_success(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.sync_replication(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sync_replication_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_sync_replication" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "post_sync_replication_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetAppRestInterceptor, "pre_sync_replication" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = replication.SyncReplicationRequest.pb( + replication.SyncReplicationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = replication.SyncReplicationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.sync_replication( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_backup_vault_rest_bad_request( + request_type=gcn_backup_vault.CreateBackupVaultRequest, +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backup_vault(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup_vault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault_rest_call_success(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_vault"] = { + "name": "name_value", + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "description": "description_value", + "labels": {}, + "backup_vault_type": 1, + "source_region": "source_region_value", + "backup_region": "backup_region_value", + "source_backup_vault": "source_backup_vault_value", + "destination_backup_vault": "destination_backup_vault_value", + "backup_retention_policy": { + "backup_minimum_enforced_retention_days": 4029, + "daily_backup_immutable": True, + "weekly_backup_immutable": True, + "monthly_backup_immutable": True, + "manual_backup_immutable": True, + }, + "kms_config": "kms_config_value", + "encryption_state": 1, + "backups_crypto_key_version": "backups_crypto_key_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backup_vault.CreateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_vault_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_backup_vault" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "post_create_backup_vault_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetAppRestInterceptor, "pre_create_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_backup_vault.CreateBackupVaultRequest.pb( + gcn_backup_vault.CreateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_backup_vault.CreateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_backup_vault_rest_bad_request( + request_type=backup_vault.GetBackupVaultRequest, +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup_vault(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_vault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault_rest_call_success(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backup_vault.BackupVault( + name="name_value", + state=backup_vault.BackupVault.State.CREATING, + description="description_value", + backup_vault_type=backup_vault.BackupVault.BackupVaultType.IN_REGION, + source_region="source_region_value", + backup_region="backup_region_value", + source_backup_vault="source_backup_vault_value", + destination_backup_vault="destination_backup_vault_value", + kms_config="kms_config_value", + encryption_state=backup_vault.BackupVault.EncryptionState.ENCRYPTION_STATE_PENDING, + backups_crypto_key_version="backups_crypto_key_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_vault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup_vault(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, backup_vault.BackupVault) + assert response.name == "name_value" + assert response.state == backup_vault.BackupVault.State.CREATING + assert response.description == "description_value" + assert ( + response.backup_vault_type == backup_vault.BackupVault.BackupVaultType.IN_REGION + ) + assert response.source_region == "source_region_value" + assert response.backup_region == "backup_region_value" + assert response.source_backup_vault == "source_backup_vault_value" + assert response.destination_backup_vault == "destination_backup_vault_value" + assert response.kms_config == "kms_config_value" + assert ( + response.encryption_state + == backup_vault.BackupVault.EncryptionState.ENCRYPTION_STATE_PENDING + ) + assert response.backups_crypto_key_version == "backups_crypto_key_version_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_establish_peering_rest_interceptors(null_interceptor): +def test_get_backup_vault_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -44472,19 +48711,17 @@ def test_establish_peering_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_establish_peering" + transports.NetAppRestInterceptor, "post_get_backup_vault" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_establish_peering_with_metadata" + transports.NetAppRestInterceptor, "post_get_backup_vault_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_establish_peering" + transports.NetAppRestInterceptor, "pre_get_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = replication.EstablishPeeringRequest.pb( - replication.EstablishPeeringRequest() + pb_message = backup_vault.GetBackupVaultRequest.pb( + backup_vault.GetBackupVaultRequest() ) 
transcode.return_value = { "method": "post", @@ -44496,19 +48733,19 @@ def test_establish_peering_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backup_vault.BackupVault.to_json(backup_vault.BackupVault()) req.return_value.content = return_value - request = replication.EstablishPeeringRequest() + request = backup_vault.GetBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backup_vault.BackupVault() + post_with_metadata.return_value = backup_vault.BackupVault(), metadata - client.establish_peering( + client.get_backup_vault( request, metadata=[ ("key", "val"), @@ -44521,16 +48758,14 @@ def test_establish_peering_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_sync_replication_rest_bad_request( - request_type=replication.SyncReplicationRequest, +def test_list_backup_vaults_rest_bad_request( + request_type=backup_vault.ListBackupVaultsRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -44545,47 +48780,53 @@ def test_sync_replication_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.sync_replication(request) + client.list_backup_vaults(request) @pytest.mark.parametrize( "request_type", [ - replication.SyncReplicationRequest, + backup_vault.ListBackupVaultsRequest, dict, ], ) -def test_sync_replication_rest_call_success(request_type): +def test_list_backup_vaults_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_vault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.sync_replication(request) + response = client.list_backup_vaults(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sync_replication_rest_interceptors(null_interceptor): +def test_list_backup_vaults_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -44597,19 +48838,17 @@ def test_sync_replication_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_sync_replication" + transports.NetAppRestInterceptor, "post_list_backup_vaults" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_sync_replication_with_metadata" + transports.NetAppRestInterceptor, "post_list_backup_vaults_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_sync_replication" + transports.NetAppRestInterceptor, "pre_list_backup_vaults" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = replication.SyncReplicationRequest.pb( - replication.SyncReplicationRequest() + pb_message = backup_vault.ListBackupVaultsRequest.pb( + backup_vault.ListBackupVaultsRequest() ) transcode.return_value = { "method": "post", @@ -44621,19 +48860,24 @@ def test_sync_replication_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backup_vault.ListBackupVaultsResponse.to_json( + backup_vault.ListBackupVaultsResponse() + ) req.return_value.content = return_value - request = replication.SyncReplicationRequest() + request = backup_vault.ListBackupVaultsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backup_vault.ListBackupVaultsResponse() + post_with_metadata.return_value = ( + backup_vault.ListBackupVaultsResponse(), + metadata, + ) - client.sync_replication( + client.list_backup_vaults( request, metadata=[ ("key", "val"), @@ -44646,14 +48890,18 @@ def 
test_sync_replication_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_backup_vault_rest_bad_request( - request_type=gcn_backup_vault.CreateBackupVaultRequest, +def test_update_backup_vault_rest_bad_request( + request_type=gcn_backup_vault.UpdateBackupVaultRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -44668,25 +48916,29 @@ def test_create_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_vault(request) + client.update_backup_vault(request) @pytest.mark.parametrize( "request_type", [ - gcn_backup_vault.CreateBackupVaultRequest, + gcn_backup_vault.UpdateBackupVaultRequest, dict, ], ) -def test_create_backup_vault_rest_call_success(request_type): +def test_update_backup_vault_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } request_init["backup_vault"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/backupVaults/sample3", "state": 1, "create_time": {"seconds": 751, "nanos": 543}, "description": "description_value", @@ -44703,13 +48955,16 @@ def test_create_backup_vault_rest_call_success(request_type): "monthly_backup_immutable": True, "manual_backup_immutable": True, }, + "kms_config": "kms_config_value", + "encryption_state": 1, + "backups_crypto_key_version": "backups_crypto_key_version_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_backup_vault.CreateBackupVaultRequest.meta.fields["backup_vault"] + test_field = gcn_backup_vault.UpdateBackupVaultRequest.meta.fields["backup_vault"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -44785,14 +49040,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_vault(request) + response = client.update_backup_vault(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_vault_rest_interceptors(null_interceptor): +def test_update_backup_vault_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -44806,17 +49061,17 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_backup_vault" + transports.NetAppRestInterceptor, "post_update_backup_vault" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_create_backup_vault_with_metadata" + transports.NetAppRestInterceptor, "post_update_backup_vault_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_backup_vault" + transports.NetAppRestInterceptor, "pre_update_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_backup_vault.CreateBackupVaultRequest.pb( - gcn_backup_vault.CreateBackupVaultRequest() + pb_message = gcn_backup_vault.UpdateBackupVaultRequest.pb( + gcn_backup_vault.UpdateBackupVaultRequest() ) transcode.return_value = { "method": "post", @@ -44831,7 +49086,7 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = gcn_backup_vault.CreateBackupVaultRequest() + request = gcn_backup_vault.UpdateBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -44840,7 +49095,7 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_backup_vault( + client.update_backup_vault( request, metadata=[ ("key", "val"), @@ -44853,8 +49108,8 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_backup_vault_rest_bad_request( - request_type=backup_vault.GetBackupVaultRequest, +def test_delete_backup_vault_rest_bad_request( + request_type=backup_vault.DeleteBackupVaultRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -44875,17 +49130,17 @@ def test_get_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_vault(request) + client.delete_backup_vault(request) @pytest.mark.parametrize( "request_type", [ - backup_vault.GetBackupVaultRequest, + backup_vault.DeleteBackupVaultRequest, dict, ], ) -def test_get_backup_vault_rest_call_success(request_type): +def test_delete_backup_vault_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -44897,45 +49152,23 @@ def test_get_backup_vault_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backup_vault.BackupVault( - name="name_value", - state=backup_vault.BackupVault.State.CREATING, - description="description_value", - backup_vault_type=backup_vault.BackupVault.BackupVaultType.IN_REGION, - source_region="source_region_value", - backup_region="backup_region_value", - source_backup_vault="source_backup_vault_value", - destination_backup_vault="destination_backup_vault_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_vault.BackupVault.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_vault(request) + response = client.delete_backup_vault(request) # Establish that the response is the type that we expect. - assert isinstance(response, backup_vault.BackupVault) - assert response.name == "name_value" - assert response.state == backup_vault.BackupVault.State.CREATING - assert response.description == "description_value" - assert ( - response.backup_vault_type == backup_vault.BackupVault.BackupVaultType.IN_REGION - ) - assert response.source_region == "source_region_value" - assert response.backup_region == "backup_region_value" - assert response.source_backup_vault == "source_backup_vault_value" - assert response.destination_backup_vault == "destination_backup_vault_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_vault_rest_interceptors(null_interceptor): +def test_delete_backup_vault_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -44947,17 +49180,19 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_backup_vault" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_backup_vault" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_backup_vault_with_metadata" + transports.NetAppRestInterceptor, "post_delete_backup_vault_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_backup_vault" + transports.NetAppRestInterceptor, "pre_delete_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup_vault.GetBackupVaultRequest.pb( - backup_vault.GetBackupVaultRequest() + pb_message = backup_vault.DeleteBackupVaultRequest.pb( + backup_vault.DeleteBackupVaultRequest() ) transcode.return_value = { "method": "post", @@ -44969,19 +49204,19 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup_vault.BackupVault.to_json(backup_vault.BackupVault()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - 
request = backup_vault.GetBackupVaultRequest() + request = backup_vault.DeleteBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup_vault.BackupVault() - post_with_metadata.return_value = backup_vault.BackupVault(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_backup_vault( + client.delete_backup_vault( request, metadata=[ ("key", "val"), @@ -44994,14 +49229,12 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_backup_vaults_rest_bad_request( - request_type=backup_vault.ListBackupVaultsRequest, -): +def test_create_backup_rest_bad_request(request_type=gcn_backup.CreateBackupRequest): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -45016,53 +49249,129 @@ def test_list_backup_vaults_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_vaults(request) + client.create_backup(request) @pytest.mark.parametrize( "request_type", [ - backup_vault.ListBackupVaultsRequest, + gcn_backup.CreateBackupRequest, dict, ], ) -def test_list_backup_vaults_rest_call_success(request_type): +def test_create_backup_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init["backup"] = { + "name": "name_value", + "state": 1, + "description": "description_value", + "volume_usage_bytes": 1938, + "backup_type": 1, + "source_volume": "source_volume_value", + "source_snapshot": "source_snapshot_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "chain_storage_bytes": 2013, + "satisfies_pzs": True, + "satisfies_pzi": True, + "volume_region": "volume_region_value", + "backup_region": "backup_region_value", + "enforced_retention_end_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backup.CreateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup_vault.ListBackupVaultsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_vaults(request) + response = client.create_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupVaultsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_vaults_rest_interceptors(null_interceptor): +def test_create_backup_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -45074,18 +49383,18 @@ def test_list_backup_vaults_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_backup_vaults" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_backup" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_backup_vaults_with_metadata" + transports.NetAppRestInterceptor, "post_create_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_backup_vaults" + transports.NetAppRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup_vault.ListBackupVaultsRequest.pb( - backup_vault.ListBackupVaultsRequest() - ) + pb_message = gcn_backup.CreateBackupRequest.pb(gcn_backup.CreateBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -45096,24 +49405,19 @@ def test_list_backup_vaults_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup_vault.ListBackupVaultsResponse.to_json( - backup_vault.ListBackupVaultsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backup_vault.ListBackupVaultsRequest() + request = gcn_backup.CreateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup_vault.ListBackupVaultsResponse() - post_with_metadata.return_value = ( - backup_vault.ListBackupVaultsResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_backup_vaults( + client.create_backup( request, metadata=[ ("key", "val"), @@ -45126,17 +49430,13 @@ def test_list_backup_vaults_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_backup_vault_rest_bad_request( - request_type=gcn_backup_vault.UpdateBackupVaultRequest, -): +def test_get_backup_rest_bad_request(request_type=backup.GetBackupRequest): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup_vault": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" - } + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" } request = request_type(**request_init) @@ -45152,135 +49452,198 @@ def test_update_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} - client.update_backup_vault(request) + client.get_backup(request) @pytest.mark.parametrize( "request_type", [ - gcn_backup_vault.UpdateBackupVaultRequest, + backup.GetBackupRequest, dict, ], ) -def test_update_backup_vault_rest_call_success(request_type): +def test_get_backup_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup_vault": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" - } - } - request_init["backup_vault"] = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3", - "state": 1, - "create_time": {"seconds": 751, "nanos": 543}, - "description": "description_value", - "labels": {}, - "backup_vault_type": 1, - "source_region": "source_region_value", - "backup_region": "backup_region_value", - "source_backup_vault": "source_backup_vault_value", - "destination_backup_vault": "destination_backup_vault_value", - "backup_retention_policy": { - "backup_minimum_enforced_retention_days": 4029, - "daily_backup_immutable": True, - "weekly_backup_immutable": True, - "monthly_backup_immutable": True, - "manual_backup_immutable": True, - }, + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = gcn_backup_vault.UpdateBackupVaultRequest.meta.fields["backup_vault"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup( + name="name_value", + state=backup.Backup.State.CREATING, + description="description_value", + volume_usage_bytes=1938, + backup_type=backup.Backup.Type.MANUAL, + source_volume="source_volume_value", + source_snapshot="source_snapshot_value", + chain_storage_bytes=2013, + satisfies_pzs=True, + satisfies_pzi=True, + volume_region="volume_region_value", + backup_region="backup_region_value", + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the response is the type that we expect. + assert isinstance(response, backup.Backup) + assert response.name == "name_value" + assert response.state == backup.Backup.State.CREATING + assert response.description == "description_value" + assert response.volume_usage_bytes == 1938 + assert response.backup_type == backup.Backup.Type.MANUAL + assert response.source_volume == "source_volume_value" + assert response.source_snapshot == "source_snapshot_value" + assert response.chain_storage_bytes == 2013 + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True + assert response.volume_region == "volume_region_value" + assert response.backup_region == "backup_region_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetAppRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetAppRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.Backup.to_json(backup.Backup()) + req.return_value.content = return_value + + request = backup.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.Backup() + post_with_metadata.return_value = backup.Backup(), metadata + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - 
subfields_not_in_runtime = [] +def test_list_backups_rest_bad_request(request_type=backup.ListBackupsRequest): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_vault"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backups(request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_vault"][field])): - del request_init["backup_vault"][field][i][subfield] - else: - del request_init["backup_vault"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + backup.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest_call_success(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_vault(request) + response = client.list_backups(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_vault_rest_interceptors(null_interceptor): +def test_list_backups_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -45292,20 +49655,16 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_backup_vault" + transports.NetAppRestInterceptor, "post_list_backups" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_update_backup_vault_with_metadata" + transports.NetAppRestInterceptor, "post_list_backups_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_backup_vault" + transports.NetAppRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_backup_vault.UpdateBackupVaultRequest.pb( - gcn_backup_vault.UpdateBackupVaultRequest() - ) + pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -45316,19 +49675,19 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backup.ListBackupsResponse.to_json(backup.ListBackupsResponse()) req.return_value.content = return_value - request = gcn_backup_vault.UpdateBackupVaultRequest() + request = backup.ListBackupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backup.ListBackupsResponse() + post_with_metadata.return_value = backup.ListBackupsResponse(), metadata - client.update_backup_vault( + client.list_backups( request, metadata=[ ("key", "val"), @@ -45341,14 +49700,14 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def 
test_delete_backup_vault_rest_bad_request( - request_type=backup_vault.DeleteBackupVaultRequest, -): +def test_delete_backup_rest_bad_request(request_type=backup.DeleteBackupRequest): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -45363,23 +49722,25 @@ def test_delete_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_vault(request) + client.delete_backup(request) @pytest.mark.parametrize( "request_type", [ - backup_vault.DeleteBackupVaultRequest, + backup.DeleteBackupRequest, dict, ], ) -def test_delete_backup_vault_rest_call_success(request_type): +def test_delete_backup_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -45394,14 +49755,14 @@ def test_delete_backup_vault_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_vault(request) + response = client.delete_backup(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_vault_rest_interceptors(null_interceptor): +def test_delete_backup_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -45415,18 +49776,16 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_backup_vault" + transports.NetAppRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_backup_vault_with_metadata" + transports.NetAppRestInterceptor, "post_delete_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_backup_vault" + transports.NetAppRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup_vault.DeleteBackupVaultRequest.pb( - backup_vault.DeleteBackupVaultRequest() - ) + pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -45440,7 +49799,7 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backup_vault.DeleteBackupVaultRequest() + request = backup.DeleteBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -45449,7 +49808,7 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_backup_vault( + client.delete_backup( request, metadata=[ ("key", "val"), @@ -45462,12 +49821,16 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_backup_rest_bad_request(request_type=gcn_backup.CreateBackupRequest): +def test_update_backup_rest_bad_request(request_type=gcn_backup.UpdateBackupRequest): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
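Note: the *_rest_interceptors tests above drive the pre_*/post_*/post_*_with_metadata hooks that the generated NetAppRestInterceptor exposes. A minimal sketch of wiring a custom interceptor follows; the hook names and transport constructor are taken from the mocks in this diff, while the transports import path is assumed from the standard generated layout and the logging body is purely illustrative:

    from google.auth import credentials as ga_credentials
    from google.cloud import netapp_v1
    from google.cloud.netapp_v1.services.net_app import transports

    class LoggingInterceptor(transports.NetAppRestInterceptor):
        def pre_delete_backup(self, request, metadata):
            # Runs before transcoding/sending; may rewrite the request or metadata.
            print("deleting", request.name)
            return request, metadata

        def post_delete_backup(self, response):
            # Runs on the deserialized response before the client returns it.
            return response

    client = netapp_v1.NetAppClient(
        transport=transports.NetAppRestTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            interceptor=LoggingInterceptor(),
        )
    )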
@@ -45482,25 +49845,29 @@ def test_create_backup_rest_bad_request(request_type=gcn_backup.CreateBackupRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup(request) + client.update_backup(request) @pytest.mark.parametrize( "request_type", [ - gcn_backup.CreateBackupRequest, + gcn_backup.UpdateBackupRequest, dict, ], ) -def test_create_backup_rest_call_success(request_type): +def test_update_backup_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } + } request_init["backup"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4", "state": 1, "description": "description_value", "volume_usage_bytes": 1938, @@ -45521,7 +49888,7 @@ def test_create_backup_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_backup.CreateBackupRequest.meta.fields["backup"] + test_field = gcn_backup.UpdateBackupRequest.meta.fields["backup"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -45597,14 +49964,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup(request) + response = client.update_backup(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_rest_interceptors(null_interceptor): +def test_update_backup_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -45618,16 +49985,16 @@ def test_create_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_backup" + transports.NetAppRestInterceptor, "post_update_backup" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_create_backup_with_metadata" + transports.NetAppRestInterceptor, "post_update_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_backup" + transports.NetAppRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_backup.CreateBackupRequest.pb(gcn_backup.CreateBackupRequest()) + pb_message = gcn_backup.UpdateBackupRequest.pb(gcn_backup.UpdateBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -45641,7 +50008,7 @@ def test_create_backup_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = gcn_backup.CreateBackupRequest() + request = gcn_backup.UpdateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -45650,7 +50017,7 @@ def test_create_backup_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_backup( + client.update_backup( request, metadata=[ ("key", "val"), @@ -45663,14 +50030,14 @@ def test_create_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_backup_rest_bad_request(request_type=backup.GetBackupRequest): +def test_create_backup_policy_rest_bad_request( + request_type=gcn_backup_policy.CreateBackupPolicyRequest, +): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
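Note: these interceptor tests stub google.api_core.path_template.transcode, the helper that maps a request message onto an HTTP method/URI/body according to the service's google.api.http bindings. A hedged illustration of the real call; the http rule below is an assumption for the sketch, not copied from the NetApp service config:

    from google.api_core import path_template

    http_options = [
        {
            "method": "get",
            "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}",
        },
    ]
    transcoded = path_template.transcode(
        http_options,
        name="projects/p/locations/l/backupVaults/v/backups/b",
    )
    # transcoded now holds the concrete binding, roughly:
    # {"method": "get",
    #  "uri": "/v1/projects/p/locations/l/backupVaults/v/backups/b",
    #  "query_params": {}}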
@@ -45685,75 +50052,126 @@ def test_get_backup_rest_bad_request(request_type=backup.GetBackupRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(request) + client.create_backup_policy(request) @pytest.mark.parametrize( "request_type", [ - backup.GetBackupRequest, + gcn_backup_policy.CreateBackupPolicyRequest, dict, ], ) -def test_get_backup_rest_call_success(request_type): +def test_create_backup_policy_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_policy"] = { + "name": "name_value", + "daily_backup_limit": 1894, + "weekly_backup_limit": 2020, + "monthly_backup_limit": 2142, + "description": "description_value", + "enabled": True, + "assigned_volume_count": 2253, + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "state": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backup_policy.CreateBackupPolicyRequest.meta.fields[ + "backup_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_policy"][field])): + del request_init["backup_policy"][field][i][subfield] + else: + del request_init["backup_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.Backup( - name="name_value", - state=backup.Backup.State.CREATING, - description="description_value", - volume_usage_bytes=1938, - backup_type=backup.Backup.Type.MANUAL, - source_volume="source_volume_value", - source_snapshot="source_snapshot_value", - chain_storage_bytes=2013, - satisfies_pzs=True, - satisfies_pzi=True, - volume_region="volume_region_value", - backup_region="backup_region_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) + response = client.create_backup_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backup.Backup) - assert response.name == "name_value" - assert response.state == backup.Backup.State.CREATING - assert response.description == "description_value" - assert response.volume_usage_bytes == 1938 - assert response.backup_type == backup.Backup.Type.MANUAL - assert response.source_volume == "source_volume_value" - assert response.source_snapshot == "source_snapshot_value" - assert response.chain_storage_bytes == 2013 - assert response.satisfies_pzs is True - assert response.satisfies_pzi is True - assert response.volume_region == "volume_region_value" - assert response.backup_region == "backup_region_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): +def test_create_backup_policy_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -45765,16 +50183,20 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_backup" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_backup_policy" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_backup_with_metadata" + transports.NetAppRestInterceptor, "post_create_backup_policy_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_backup" + transports.NetAppRestInterceptor, "pre_create_backup_policy" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) + pb_message = gcn_backup_policy.CreateBackupPolicyRequest.pb( + gcn_backup_policy.CreateBackupPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -45785,19 +50207,19 @@ def test_get_backup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.Backup.to_json(backup.Backup()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backup.GetBackupRequest() + request = gcn_backup_policy.CreateBackupPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup.Backup() - post_with_metadata.return_value = backup.Backup(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_backup( + client.create_backup_policy( request, metadata=[ ("key", "val"), @@ -45810,12 +50232,14 @@ def test_get_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_backups_rest_bad_request(request_type=backup.ListBackupsRequest): +def test_get_backup_policy_rest_bad_request( + request_type=backup_policy.GetBackupPolicyRequest, +): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + 
request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -45830,31 +50254,37 @@ def test_list_backups_rest_bad_request(request_type=backup.ListBackupsRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(request) + client.get_backup_policy(request) @pytest.mark.parametrize( "request_type", [ - backup.ListBackupsRequest, + backup_policy.GetBackupPolicyRequest, dict, ], ) -def test_list_backups_rest_call_success(request_type): +def test_get_backup_policy_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = backup_policy.BackupPolicy( + name="name_value", + daily_backup_limit=1894, + weekly_backup_limit=2020, + monthly_backup_limit=2142, + description="description_value", + enabled=True, + assigned_volume_count=2253, + state=backup_policy.BackupPolicy.State.CREATING, ) # Wrap the value into a proper Response obj @@ -45862,21 +50292,27 @@ def test_list_backups_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) + return_value = backup_policy.BackupPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) + response = client.get_backup_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backup_policy.BackupPolicy) + assert response.name == "name_value" + assert response.daily_backup_limit == 1894 + assert response.weekly_backup_limit == 2020 + assert response.monthly_backup_limit == 2142 + assert response.description == "description_value" + assert response.enabled is True + assert response.assigned_volume_count == 2253 + assert response.state == backup_policy.BackupPolicy.State.CREATING @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): +def test_get_backup_policy_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -45888,16 +50324,18 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_backups" + transports.NetAppRestInterceptor, "post_get_backup_policy" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_backups_with_metadata" + transports.NetAppRestInterceptor, "post_get_backup_policy_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_backups" + transports.NetAppRestInterceptor, "pre_get_backup_policy" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) + pb_message = backup_policy.GetBackupPolicyRequest.pb( + backup_policy.GetBackupPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -45908,19 +50346,19 @@ def test_list_backups_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.ListBackupsResponse.to_json(backup.ListBackupsResponse()) + return_value = backup_policy.BackupPolicy.to_json(backup_policy.BackupPolicy()) req.return_value.content = return_value - request = backup.ListBackupsRequest() + request = backup_policy.GetBackupPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup.ListBackupsResponse() - post_with_metadata.return_value = backup.ListBackupsResponse(), metadata + post.return_value = backup_policy.BackupPolicy() + post_with_metadata.return_value = backup_policy.BackupPolicy(), metadata - client.list_backups( + client.get_backup_policy( request, metadata=[ ("key", "val"), @@ -45933,14 +50371,14 @@ def test_list_backups_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_backup_rest_bad_request(request_type=backup.DeleteBackupRequest): +def test_list_backup_policies_rest_bad_request( + request_type=backup_policy.ListBackupPoliciesRequest, +): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # 
Mock the http request call within the method and fake a BadRequest error. @@ -45955,47 +50393,53 @@ def test_delete_backup_rest_bad_request(request_type=backup.DeleteBackupRequest) response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(request) + client.list_backup_policies(request) @pytest.mark.parametrize( "request_type", [ - backup.DeleteBackupRequest, + backup_policy.ListBackupPoliciesRequest, dict, ], ) -def test_delete_backup_rest_call_success(request_type): +def test_list_backup_policies_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_policy.ListBackupPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) + response = client.list_backup_policies(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListBackupPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): +def test_list_backup_policies_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -46007,18 +50451,18 @@ def test_delete_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_backup" + transports.NetAppRestInterceptor, "post_list_backup_policies" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_backup_with_metadata" + transports.NetAppRestInterceptor, "post_list_backup_policies_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_backup" + transports.NetAppRestInterceptor, "pre_list_backup_policies" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) + pb_message = backup_policy.ListBackupPoliciesRequest.pb( + backup_policy.ListBackupPoliciesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -46029,19 +50473,24 @@ def test_delete_backup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backup_policy.ListBackupPoliciesResponse.to_json( + backup_policy.ListBackupPoliciesResponse() + ) req.return_value.content = return_value - request = backup.DeleteBackupRequest() + request = backup_policy.ListBackupPoliciesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backup_policy.ListBackupPoliciesResponse() + post_with_metadata.return_value = ( + backup_policy.ListBackupPoliciesResponse(), + metadata, + ) - client.delete_backup( + client.list_backup_policies( request, metadata=[ ("key", "val"), @@ -46054,14 +50503,16 @@ def test_delete_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_backup_rest_bad_request(request_type=gcn_backup.UpdateBackupRequest): +def test_update_backup_policy_rest_bad_request( + request_type=gcn_backup_policy.UpdateBackupPolicyRequest, +): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + "backup_policy": { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" } } request = request_type(**request_init) @@ -46078,50 +50529,47 @@ def test_update_backup_rest_bad_request(request_type=gcn_backup.UpdateBackupRequ response_value.request = mock.Mock() req.return_value 
= response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup(request) + client.update_backup_policy(request) @pytest.mark.parametrize( "request_type", [ - gcn_backup.UpdateBackupRequest, + gcn_backup_policy.UpdateBackupPolicyRequest, dict, ], ) -def test_update_backup_rest_call_success(request_type): +def test_update_backup_policy_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + "backup_policy": { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" } } - request_init["backup"] = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4", - "state": 1, + request_init["backup_policy"] = { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3", + "daily_backup_limit": 1894, + "weekly_backup_limit": 2020, + "monthly_backup_limit": 2142, "description": "description_value", - "volume_usage_bytes": 1938, - "backup_type": 1, - "source_volume": "source_volume_value", - "source_snapshot": "source_snapshot_value", + "enabled": True, + "assigned_volume_count": 2253, "create_time": {"seconds": 751, "nanos": 543}, "labels": {}, - "chain_storage_bytes": 2013, - "satisfies_pzs": True, - "satisfies_pzi": True, - "volume_region": "volume_region_value", - "backup_region": "backup_region_value", - "enforced_retention_end_time": {}, + "state": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_backup.UpdateBackupRequest.meta.fields["backup"] + test_field = gcn_backup_policy.UpdateBackupPolicyRequest.meta.fields[ + "backup_policy" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -46149,7 +50597,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER + for field, value in request_init["backup_policy"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -46179,10 +50627,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] + for i in range(0, len(request_init["backup_policy"][field])): + del request_init["backup_policy"][field][i][subfield] else: - del request_init["backup"][field][subfield] + del request_init["backup_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
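Note: the field-pruning preamble repeated in each *_call_success test hinges on one distinction: proto-plus message classes carry their schema on .meta.fields and lack a DESCRIPTOR attribute, while raw *_pb2 types expose DESCRIPTOR. A short sketch of that check and of the .pb() unwrapping the tests perform before JSON-serializing (the resource name is a placeholder):

    from google.protobuf import json_format
    from google.cloud.netapp_v1.types import BackupPolicy

    wrapped = BackupPolicy(name="projects/p/locations/l/backupPolicies/bp")

    hasattr(BackupPolicy, "DESCRIPTOR")  # False: proto-plus wrapper
    raw_pb = BackupPolicy.pb(wrapped)    # unwrap to the underlying protobuf message
    hasattr(type(raw_pb), "DESCRIPTOR")  # True: generated protobuf type
    json_str = json_format.MessageToJson(raw_pb)  # what the mocked responses return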
@@ -46197,14 +50645,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup(request) + response = client.update_backup_policy(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_rest_interceptors(null_interceptor): +def test_update_backup_policy_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -46218,16 +50666,18 @@ def test_update_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_backup" + transports.NetAppRestInterceptor, "post_update_backup_policy" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_update_backup_with_metadata" + transports.NetAppRestInterceptor, "post_update_backup_policy_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_backup" + transports.NetAppRestInterceptor, "pre_update_backup_policy" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_backup.UpdateBackupRequest.pb(gcn_backup.UpdateBackupRequest()) + pb_message = gcn_backup_policy.UpdateBackupPolicyRequest.pb( + gcn_backup_policy.UpdateBackupPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -46241,7 +50691,7 @@ def test_update_backup_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = gcn_backup.UpdateBackupRequest() + request = gcn_backup_policy.UpdateBackupPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -46250,7 +50700,7 @@ def test_update_backup_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_backup( + client.update_backup_policy( request, metadata=[ ("key", "val"), @@ -46263,14 +50713,14 @@ def test_update_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_backup_policy_rest_bad_request( - request_type=gcn_backup_policy.CreateBackupPolicyRequest, +def test_delete_backup_policy_rest_bad_request( + request_type=backup_policy.DeleteBackupPolicyRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
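Note: the create/update/delete tests fake the transport response with operations_pb2.Operation(name="operations/spam") and patch operation.Operation._set_result_from_operation so the returned future never polls. Outside the tests these methods return a google.api_core.operation.Operation future; a minimal sketch, assuming the flattened name parameter and a placeholder resource:

    from google.cloud import netapp_v1

    client = netapp_v1.NetAppClient()
    lro = client.delete_backup_policy(
        name="projects/p/locations/l/backupPolicies/bp",  # placeholder resource name
    )
    lro.result(timeout=300)  # blocks, polling the operation, until it completes or fails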
@@ -46285,126 +50735,174 @@ def test_create_backup_policy_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_policy(request) + client.delete_backup_policy(request) @pytest.mark.parametrize( "request_type", [ - gcn_backup_policy.CreateBackupPolicyRequest, + backup_policy.DeleteBackupPolicyRequest, dict, ], ) -def test_create_backup_policy_rest_call_success(request_type): +def test_delete_backup_policy_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["backup_policy"] = { - "name": "name_value", - "daily_backup_limit": 1894, - "weekly_backup_limit": 2020, - "monthly_backup_limit": 2142, - "description": "description_value", - "enabled": True, - "assigned_volume_count": 2253, - "create_time": {"seconds": 751, "nanos": 543}, - "labels": {}, - "state": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = gcn_backup_policy.CreateBackupPolicyRequest.meta.fields[ - "backup_policy" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_backup_policy(request) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_policy_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) - subfields_not_in_runtime = [] + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_backup_policy" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_backup_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetAppRestInterceptor, "pre_delete_backup_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup_policy.DeleteBackupPolicyRequest.pb( + backup_policy.DeleteBackupPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_policy"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + request = backup_policy.DeleteBackupPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["backup_policy"][field])): - del request_init["backup_policy"][field][i][subfield] - else: - del request_init["backup_policy"][field][subfield] + client.delete_backup_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_quota_rules_rest_bad_request( + request_type=quota_rule.ListQuotaRulesRequest, +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_quota_rules(request) + + +@pytest.mark.parametrize( + "request_type", + [ + quota_rule.ListQuotaRulesRequest, + dict, + ], +) +def test_list_quota_rules_rest_call_success(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = quota_rule.ListQuotaRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = quota_rule.ListQuotaRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_policy(request) + response = client.list_quota_rules(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListQuotaRulesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_policy_rest_interceptors(null_interceptor): +def test_list_quota_rules_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -46416,19 +50914,17 @@ def test_create_backup_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_backup_policy" + transports.NetAppRestInterceptor, "post_list_quota_rules" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_create_backup_policy_with_metadata" + transports.NetAppRestInterceptor, "post_list_quota_rules_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_backup_policy" + transports.NetAppRestInterceptor, "pre_list_quota_rules" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_backup_policy.CreateBackupPolicyRequest.pb( - gcn_backup_policy.CreateBackupPolicyRequest() + pb_message = quota_rule.ListQuotaRulesRequest.pb( + quota_rule.ListQuotaRulesRequest() ) transcode.return_value = { "method": "post", @@ -46440,19 +50936,21 @@ def test_create_backup_policy_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = quota_rule.ListQuotaRulesResponse.to_json( + quota_rule.ListQuotaRulesResponse() + ) req.return_value.content = return_value - request = gcn_backup_policy.CreateBackupPolicyRequest() + request = quota_rule.ListQuotaRulesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = quota_rule.ListQuotaRulesResponse() + post_with_metadata.return_value = quota_rule.ListQuotaRulesResponse(), metadata - client.create_backup_policy( + client.list_quota_rules( request, metadata=[ ("key", "val"), @@ -46465,14 +50963,14 @@ def test_create_backup_policy_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_backup_policy_rest_bad_request( - request_type=backup_policy.GetBackupPolicyRequest, -): +def test_get_quota_rule_rest_bad_request(request_type=quota_rule.GetQuotaRuleRequest): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
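Note: list_* calls return pager objects (pagers.ListQuotaRulesPager above) rather than raw responses; iterating a pager transparently re-issues the request whenever next_page_token is set, which is why these tests assert on the pager type and its page token. A hedged usage sketch with a placeholder parent:

    from google.cloud import netapp_v1

    client = netapp_v1.NetAppClient()
    for quota_rule in client.list_quota_rules(
        parent="projects/p/locations/l/volumes/v"  # placeholder resource name
    ):
        # each item is a QuotaRule; additional pages are fetched on demand
        print(quota_rule.name)
    # the .pages attribute yields whole ListQuotaRulesResponse messages instead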
@@ -46487,37 +50985,38 @@ def test_get_backup_policy_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_policy(request) + client.get_quota_rule(request) @pytest.mark.parametrize( "request_type", [ - backup_policy.GetBackupPolicyRequest, + quota_rule.GetQuotaRuleRequest, dict, ], ) -def test_get_backup_policy_rest_call_success(request_type): +def test_get_quota_rule_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup_policy.BackupPolicy( + return_value = quota_rule.QuotaRule( name="name_value", - daily_backup_limit=1894, - weekly_backup_limit=2020, - monthly_backup_limit=2142, + target="target_value", + type_=quota_rule.QuotaRule.Type.INDIVIDUAL_USER_QUOTA, + disk_limit_mib=1472, + state=quota_rule.QuotaRule.State.CREATING, + state_details="state_details_value", description="description_value", - enabled=True, - assigned_volume_count=2253, - state=backup_policy.BackupPolicy.State.CREATING, ) # Wrap the value into a proper Response obj @@ -46525,27 +51024,26 @@ def test_get_backup_policy_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup_policy.BackupPolicy.pb(return_value) + return_value = quota_rule.QuotaRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_policy(request) + response = client.get_quota_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backup_policy.BackupPolicy) + assert isinstance(response, quota_rule.QuotaRule) assert response.name == "name_value" - assert response.daily_backup_limit == 1894 - assert response.weekly_backup_limit == 2020 - assert response.monthly_backup_limit == 2142 + assert response.target == "target_value" + assert response.type_ == quota_rule.QuotaRule.Type.INDIVIDUAL_USER_QUOTA + assert response.disk_limit_mib == 1472 + assert response.state == quota_rule.QuotaRule.State.CREATING + assert response.state_details == "state_details_value" assert response.description == "description_value" - assert response.enabled is True - assert response.assigned_volume_count == 2253 - assert response.state == backup_policy.BackupPolicy.State.CREATING @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_policy_rest_interceptors(null_interceptor): +def test_get_quota_rule_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -46557,18 +51055,16 @@ def test_get_backup_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_backup_policy" + transports.NetAppRestInterceptor, "post_get_quota_rule" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_backup_policy_with_metadata" + transports.NetAppRestInterceptor, "post_get_quota_rule_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_backup_policy" + transports.NetAppRestInterceptor, "pre_get_quota_rule" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup_policy.GetBackupPolicyRequest.pb( - backup_policy.GetBackupPolicyRequest() - ) + pb_message = quota_rule.GetQuotaRuleRequest.pb(quota_rule.GetQuotaRuleRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -46579,19 +51075,19 @@ def test_get_backup_policy_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup_policy.BackupPolicy.to_json(backup_policy.BackupPolicy()) + return_value = quota_rule.QuotaRule.to_json(quota_rule.QuotaRule()) req.return_value.content = return_value - request = backup_policy.GetBackupPolicyRequest() + request = quota_rule.GetQuotaRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup_policy.BackupPolicy() - post_with_metadata.return_value = backup_policy.BackupPolicy(), metadata + post.return_value = quota_rule.QuotaRule() + post_with_metadata.return_value = quota_rule.QuotaRule(), metadata - client.get_backup_policy( + client.get_quota_rule( request, metadata=[ ("key", "val"), @@ -46604,14 +51100,14 @@ def test_get_backup_policy_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_backup_policies_rest_bad_request( - request_type=backup_policy.ListBackupPoliciesRequest, +def test_create_quota_rule_rest_bad_request( + request_type=gcn_quota_rule.CreateQuotaRuleRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -46626,53 +51122,123 @@ def test_list_backup_policies_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_policies(request) + client.create_quota_rule(request) @pytest.mark.parametrize( "request_type", [ - backup_policy.ListBackupPoliciesRequest, + gcn_quota_rule.CreateQuotaRuleRequest, dict, ], ) -def test_list_backup_policies_rest_call_success(request_type): +def test_create_quota_rule_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init["quota_rule"] = { + "name": "name_value", + "target": "target_value", + "type_": 1, + "disk_limit_mib": 1472, + "state": 1, + "state_details": "state_details_value", + "create_time": {"seconds": 751, "nanos": 543}, + "description": "description_value", + "labels": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_quota_rule.CreateQuotaRuleRequest.meta.fields["quota_rule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["quota_rule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["quota_rule"][field])): + del request_init["quota_rule"][field][i][subfield] + else: + del request_init["quota_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup_policy.ListBackupPoliciesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_policies(request) + response = client.create_quota_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPoliciesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_policies_rest_interceptors(null_interceptor): +def test_create_quota_rule_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -46684,17 +51250,19 @@ def test_list_backup_policies_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_backup_policies" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_quota_rule" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_backup_policies_with_metadata" + transports.NetAppRestInterceptor, "post_create_quota_rule_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_backup_policies" + transports.NetAppRestInterceptor, "pre_create_quota_rule" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup_policy.ListBackupPoliciesRequest.pb( - backup_policy.ListBackupPoliciesRequest() + pb_message = gcn_quota_rule.CreateQuotaRuleRequest.pb( + gcn_quota_rule.CreateQuotaRuleRequest() ) transcode.return_value = { "method": "post", @@ -46706,24 +51274,19 @@ def test_list_backup_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup_policy.ListBackupPoliciesResponse.to_json( - backup_policy.ListBackupPoliciesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backup_policy.ListBackupPoliciesRequest() + request = gcn_quota_rule.CreateQuotaRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup_policy.ListBackupPoliciesResponse() - post_with_metadata.return_value = ( - backup_policy.ListBackupPoliciesResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_backup_policies( + client.create_quota_rule( request, metadata=[ ("key", "val"), @@ -46736,16 +51299,16 @@ def test_list_backup_policies_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_backup_policy_rest_bad_request( - request_type=gcn_backup_policy.UpdateBackupPolicyRequest, +def test_update_quota_rule_rest_bad_request( + request_type=gcn_quota_rule.UpdateQuotaRuleRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup_policy": { - "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + "quota_rule": { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" } } request = request_type(**request_init) @@ -46762,47 +51325,44 @@ def test_update_backup_policy_rest_bad_request( 
response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_policy(request) + client.update_quota_rule(request) @pytest.mark.parametrize( "request_type", [ - gcn_backup_policy.UpdateBackupPolicyRequest, + gcn_quota_rule.UpdateQuotaRuleRequest, dict, ], ) -def test_update_backup_policy_rest_call_success(request_type): +def test_update_quota_rule_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup_policy": { - "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + "quota_rule": { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" } } - request_init["backup_policy"] = { - "name": "projects/sample1/locations/sample2/backupPolicies/sample3", - "daily_backup_limit": 1894, - "weekly_backup_limit": 2020, - "monthly_backup_limit": 2142, - "description": "description_value", - "enabled": True, - "assigned_volume_count": 2253, + request_init["quota_rule"] = { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4", + "target": "target_value", + "type_": 1, + "disk_limit_mib": 1472, + "state": 1, + "state_details": "state_details_value", "create_time": {"seconds": 751, "nanos": 543}, + "description": "description_value", "labels": {}, - "state": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_backup_policy.UpdateBackupPolicyRequest.meta.fields[ - "backup_policy" - ] + test_field = gcn_quota_rule.UpdateQuotaRuleRequest.meta.fields["quota_rule"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -46830,7 +51390,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_policy"].items(): # pragma: NO COVER + for field, value in request_init["quota_rule"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -46860,10 +51420,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["backup_policy"][field])): - del request_init["backup_policy"][field][i][subfield] + for i in range(0, len(request_init["quota_rule"][field])): + del request_init["quota_rule"][field][i][subfield] else: - del request_init["backup_policy"][field][subfield] + del request_init["quota_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -46878,14 +51438,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_policy(request) + response = client.update_quota_rule(request) # Establish that the response is the type that we expect. 
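     # (No isinstance assertion follows for long-running methods: the generated
     # test only re-serializes the Operation payload it fed to the mocked
     # session rather than asserting a response type.)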
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_policy_rest_interceptors(null_interceptor): +def test_update_quota_rule_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -46899,17 +51459,17 @@ def test_update_backup_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_backup_policy" + transports.NetAppRestInterceptor, "post_update_quota_rule" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_update_backup_policy_with_metadata" + transports.NetAppRestInterceptor, "post_update_quota_rule_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_backup_policy" + transports.NetAppRestInterceptor, "pre_update_quota_rule" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_backup_policy.UpdateBackupPolicyRequest.pb( - gcn_backup_policy.UpdateBackupPolicyRequest() + pb_message = gcn_quota_rule.UpdateQuotaRuleRequest.pb( + gcn_quota_rule.UpdateQuotaRuleRequest() ) transcode.return_value = { "method": "post", @@ -46924,7 +51484,7 @@ def test_update_backup_policy_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = gcn_backup_policy.UpdateBackupPolicyRequest() + request = gcn_quota_rule.UpdateQuotaRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -46933,7 +51493,7 @@ def test_update_backup_policy_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_backup_policy( + client.update_quota_rule( request, metadata=[ ("key", "val"), @@ -46946,14 +51506,16 @@ def test_update_backup_policy_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_backup_policy_rest_bad_request( - request_type=backup_policy.DeleteBackupPolicyRequest, +def test_delete_quota_rule_rest_bad_request( + request_type=quota_rule.DeleteQuotaRuleRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
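The create and update tests above share one guard worth calling out: before building the request, they prune from the sample dict any subfields that the proto-plus runtime installed at test time does not know about, so a test generated against a newer API surface still passes against an older dependency (the gapic-generator-python issue #1748 linked in the comments has the background). Below is a minimal, self-contained sketch of a top-level version of that idea, assuming only the proto-plus runtime; QuotaRuleStub and prune_unknown_fields are illustrative names, not part of google-cloud-netapp.

import proto  # proto-plus, the runtime these GAPIC request types are built on


class QuotaRuleStub(proto.Message):
    # Hypothetical stand-in carrying two of the fields exercised above.
    name = proto.Field(proto.STRING, number=1)
    disk_limit_mib = proto.Field(proto.INT64, number=2)


def prune_unknown_fields(payload: dict, message_cls) -> dict:
    # Keep only the keys the runtime version of the message knows about,
    # mirroring the subfields_not_in_runtime bookkeeping in the generated tests.
    known = set(message_cls.meta.fields)
    return {key: value for key, value in payload.items() if key in known}


sample = {"name": "rules/example", "disk_limit_mib": 1472, "field_added_later": 1}
print(prune_unknown_fields(sample, QuotaRuleStub))
# -> {'name': 'rules/example', 'disk_limit_mib': 1472}

The generated tests go one level deeper, walking nested message fields via get_message_fields before deleting anything, which is why their bookkeeping tracks (field, subfield) pairs rather than bare keys.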
@@ -46968,23 +51530,25 @@ def test_delete_backup_policy_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_policy(request) + client.delete_quota_rule(request) @pytest.mark.parametrize( "request_type", [ - backup_policy.DeleteBackupPolicyRequest, + quota_rule.DeleteQuotaRuleRequest, dict, ], ) -def test_delete_backup_policy_rest_call_success(request_type): +def test_delete_quota_rule_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -46999,14 +51563,14 @@ def test_delete_backup_policy_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_policy(request) + response = client.delete_quota_rule(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_policy_rest_interceptors(null_interceptor): +def test_delete_quota_rule_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -47020,17 +51584,17 @@ def test_delete_backup_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_backup_policy" + transports.NetAppRestInterceptor, "post_delete_quota_rule" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_backup_policy_with_metadata" + transports.NetAppRestInterceptor, "post_delete_quota_rule_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_backup_policy" + transports.NetAppRestInterceptor, "pre_delete_quota_rule" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backup_policy.DeleteBackupPolicyRequest.pb( - backup_policy.DeleteBackupPolicyRequest() + pb_message = quota_rule.DeleteQuotaRuleRequest.pb( + quota_rule.DeleteQuotaRuleRequest() ) transcode.return_value = { "method": "post", @@ -47045,7 +51609,7 @@ def test_delete_backup_policy_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backup_policy.DeleteBackupPolicyRequest() + request = quota_rule.DeleteQuotaRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -47054,7 +51618,7 @@ def test_delete_backup_policy_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_backup_policy( + client.delete_quota_rule( request, metadata=[ ("key", "val"), @@ -47067,14 +51631,14 @@ def 
test_delete_backup_policy_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_quota_rules_rest_bad_request( - request_type=quota_rule.ListQuotaRulesRequest, +def test_restore_backup_files_rest_bad_request( + request_type=volume.RestoreBackupFilesRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -47089,53 +51653,45 @@ def test_list_quota_rules_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_quota_rules(request) + client.restore_backup_files(request) @pytest.mark.parametrize( "request_type", [ - quota_rule.ListQuotaRulesRequest, + volume.RestoreBackupFilesRequest, dict, ], ) -def test_list_quota_rules_rest_call_success(request_type): +def test_restore_backup_files_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = quota_rule.ListQuotaRulesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = quota_rule.ListQuotaRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_quota_rules(request) + response = client.restore_backup_files(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListQuotaRulesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_quota_rules_rest_interceptors(null_interceptor): +def test_restore_backup_files_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -47147,17 +51703,19 @@ def test_list_quota_rules_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_quota_rules" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_restore_backup_files" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_quota_rules_with_metadata" + transports.NetAppRestInterceptor, "post_restore_backup_files_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_quota_rules" + transports.NetAppRestInterceptor, "pre_restore_backup_files" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = quota_rule.ListQuotaRulesRequest.pb( - quota_rule.ListQuotaRulesRequest() + pb_message = volume.RestoreBackupFilesRequest.pb( + volume.RestoreBackupFilesRequest() ) transcode.return_value = { "method": "post", @@ -47169,21 +51727,19 @@ def test_list_quota_rules_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = quota_rule.ListQuotaRulesResponse.to_json( - quota_rule.ListQuotaRulesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = quota_rule.ListQuotaRulesRequest() + request = volume.RestoreBackupFilesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = quota_rule.ListQuotaRulesResponse() - post_with_metadata.return_value = quota_rule.ListQuotaRulesResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_quota_rules( + client.restore_backup_files( request, metadata=[ ("key", "val"), @@ -47196,14 +51752,14 @@ def test_list_quota_rules_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_quota_rule_rest_bad_request(request_type=quota_rule.GetQuotaRuleRequest): +def test_list_host_groups_rest_bad_request( + request_type=host_group.ListHostGroupsRequest, +): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
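All of the *_rest_interceptors tests in this diff exercise the same transport contract: pre_<rpc>(request, metadata) runs before the request is transcoded to HTTP and must return the (request, metadata) pair, while post_<rpc>(response) and post_<rpc>_with_metadata run after the HTTP body is deserialized back into a message. A hedged sketch of how a caller plugs into those same seams; LoggingInterceptor is illustrative, only the hook names shown in this diff are assumed to exist, and the import paths follow the standard GAPIC package layout.

import logging

from google.auth import credentials as ga_credentials
from google.cloud import netapp_v1
from google.cloud.netapp_v1.services.net_app import transports


class LoggingInterceptor(transports.NetAppRestInterceptor):
    def pre_get_quota_rule(self, request, metadata):
        # Runs before transcoding; must hand back the (request, metadata) pair.
        logging.info("get_quota_rule request: %s", request)
        return request, metadata

    def post_get_quota_rule(self, response):
        # Runs after the HTTP response is deserialized into a QuotaRule.
        logging.info("get_quota_rule response: %s", response)
        return response


transport = transports.NetAppRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = netapp_v1.NetAppClient(transport=transport)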
@@ -47218,38 +51774,31 @@ def test_get_quota_rule_rest_bad_request(request_type=quota_rule.GetQuotaRuleReq response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_quota_rule(request) + client.list_host_groups(request) @pytest.mark.parametrize( "request_type", [ - quota_rule.GetQuotaRuleRequest, + host_group.ListHostGroupsRequest, dict, ], ) -def test_get_quota_rule_rest_call_success(request_type): +def test_list_host_groups_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = quota_rule.QuotaRule( - name="name_value", - target="target_value", - type_=quota_rule.QuotaRule.Type.INDIVIDUAL_USER_QUOTA, - disk_limit_mib=1472, - state=quota_rule.QuotaRule.State.CREATING, - state_details="state_details_value", - description="description_value", + return_value = host_group.ListHostGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -47257,26 +51806,21 @@ def test_get_quota_rule_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = quota_rule.QuotaRule.pb(return_value) + return_value = host_group.ListHostGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_quota_rule(request) + response = client.list_host_groups(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, quota_rule.QuotaRule) - assert response.name == "name_value" - assert response.target == "target_value" - assert response.type_ == quota_rule.QuotaRule.Type.INDIVIDUAL_USER_QUOTA - assert response.disk_limit_mib == 1472 - assert response.state == quota_rule.QuotaRule.State.CREATING - assert response.state_details == "state_details_value" - assert response.description == "description_value" + assert isinstance(response, pagers.ListHostGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_quota_rule_rest_interceptors(null_interceptor): +def test_list_host_groups_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -47288,16 +51832,18 @@ def test_get_quota_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_quota_rule" + transports.NetAppRestInterceptor, "post_list_host_groups" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_quota_rule_with_metadata" + transports.NetAppRestInterceptor, "post_list_host_groups_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_quota_rule" + transports.NetAppRestInterceptor, "pre_list_host_groups" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = quota_rule.GetQuotaRuleRequest.pb(quota_rule.GetQuotaRuleRequest()) + pb_message = host_group.ListHostGroupsRequest.pb( + host_group.ListHostGroupsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -47308,19 +51854,21 @@ def test_get_quota_rule_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = quota_rule.QuotaRule.to_json(quota_rule.QuotaRule()) + return_value = host_group.ListHostGroupsResponse.to_json( + host_group.ListHostGroupsResponse() + ) req.return_value.content = return_value - request = quota_rule.GetQuotaRuleRequest() + request = host_group.ListHostGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = quota_rule.QuotaRule() - post_with_metadata.return_value = quota_rule.QuotaRule(), metadata + post.return_value = host_group.ListHostGroupsResponse() + post_with_metadata.return_value = host_group.ListHostGroupsResponse(), metadata - client.get_quota_rule( + client.list_host_groups( request, metadata=[ ("key", "val"), @@ -47333,14 +51881,12 @@ def test_get_quota_rule_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_quota_rule_rest_bad_request( - request_type=gcn_quota_rule.CreateQuotaRuleRequest, -): +def test_get_host_group_rest_bad_request(request_type=host_group.GetHostGroupRequest): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/hostGroups/sample3"} request = request_type(**request_init) # 
Mock the http request call within the method and fake a BadRequest error. @@ -47355,123 +51901,61 @@ def test_create_quota_rule_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_quota_rule(request) + client.get_host_group(request) @pytest.mark.parametrize( "request_type", [ - gcn_quota_rule.CreateQuotaRuleRequest, + host_group.GetHostGroupRequest, dict, ], ) -def test_create_quota_rule_rest_call_success(request_type): +def test_get_host_group_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} - request_init["quota_rule"] = { - "name": "name_value", - "target": "target_value", - "type_": 1, - "disk_limit_mib": 1472, - "state": 1, - "state_details": "state_details_value", - "create_time": {"seconds": 751, "nanos": 543}, - "description": "description_value", - "labels": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcn_quota_rule.CreateQuotaRuleRequest.meta.fields["quota_rule"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["quota_rule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["quota_rule"][field])): - del request_init["quota_rule"][field][i][subfield] - else: - del request_init["quota_rule"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/hostGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = host_group.HostGroup( + name="name_value", + type_=host_group.HostGroup.Type.ISCSI_INITIATOR, + state=host_group.HostGroup.State.CREATING, + hosts=["hosts_value"], + os_type=common.OsType.LINUX, + description="description_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = host_group.HostGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_quota_rule(request) + response = client.get_host_group(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, host_group.HostGroup) + assert response.name == "name_value" + assert response.type_ == host_group.HostGroup.Type.ISCSI_INITIATOR + assert response.state == host_group.HostGroup.State.CREATING + assert response.hosts == ["hosts_value"] + assert response.os_type == common.OsType.LINUX + assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_quota_rule_rest_interceptors(null_interceptor): +def test_get_host_group_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -47483,20 +51967,16 @@ def test_create_quota_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_quota_rule" + transports.NetAppRestInterceptor, "post_get_host_group" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_create_quota_rule_with_metadata" + transports.NetAppRestInterceptor, "post_get_host_group_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_quota_rule" + transports.NetAppRestInterceptor, "pre_get_host_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_quota_rule.CreateQuotaRuleRequest.pb( - gcn_quota_rule.CreateQuotaRuleRequest() - ) + pb_message = host_group.GetHostGroupRequest.pb(host_group.GetHostGroupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -47507,19 +51987,19 @@ def test_create_quota_rule_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - 
return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = host_group.HostGroup.to_json(host_group.HostGroup()) req.return_value.content = return_value - request = gcn_quota_rule.CreateQuotaRuleRequest() + request = host_group.GetHostGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = host_group.HostGroup() + post_with_metadata.return_value = host_group.HostGroup(), metadata - client.create_quota_rule( + client.get_host_group( request, metadata=[ ("key", "val"), @@ -47532,18 +52012,14 @@ def test_create_quota_rule_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_quota_rule_rest_bad_request( - request_type=gcn_quota_rule.UpdateQuotaRuleRequest, +def test_create_host_group_rest_bad_request( + request_type=gcn_host_group.CreateHostGroupRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "quota_rule": { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -47558,35 +52034,30 @@ def test_update_quota_rule_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_quota_rule(request) + client.create_host_group(request) @pytest.mark.parametrize( "request_type", [ - gcn_quota_rule.UpdateQuotaRuleRequest, + gcn_host_group.CreateHostGroupRequest, dict, ], ) -def test_update_quota_rule_rest_call_success(request_type): +def test_create_host_group_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "quota_rule": { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" - } - } - request_init["quota_rule"] = { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4", - "target": "target_value", + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["host_group"] = { + "name": "name_value", "type_": 1, - "disk_limit_mib": 1472, "state": 1, - "state_details": "state_details_value", "create_time": {"seconds": 751, "nanos": 543}, + "hosts": ["hosts_value1", "hosts_value2"], + "os_type": 1, "description": "description_value", "labels": {}, } @@ -47595,7 +52066,7 @@ def test_update_quota_rule_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_quota_rule.UpdateQuotaRuleRequest.meta.fields["quota_rule"] + test_field = gcn_host_group.CreateHostGroupRequest.meta.fields["host_group"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -47623,7 +52094,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, 
value in request_init["quota_rule"].items(): # pragma: NO COVER + for field, value in request_init["host_group"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -47653,10 +52124,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["quota_rule"][field])): - del request_init["quota_rule"][field][i][subfield] + for i in range(0, len(request_init["host_group"][field])): + del request_init["host_group"][field][i][subfield] else: - del request_init["quota_rule"][field][subfield] + del request_init["host_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -47671,14 +52142,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_quota_rule(request) + response = client.create_host_group(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_quota_rule_rest_interceptors(null_interceptor): +def test_create_host_group_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -47692,17 +52163,17 @@ def test_update_quota_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_quota_rule" + transports.NetAppRestInterceptor, "post_create_host_group" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_update_quota_rule_with_metadata" + transports.NetAppRestInterceptor, "post_create_host_group_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_quota_rule" + transports.NetAppRestInterceptor, "pre_create_host_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcn_quota_rule.UpdateQuotaRuleRequest.pb( - gcn_quota_rule.UpdateQuotaRuleRequest() + pb_message = gcn_host_group.CreateHostGroupRequest.pb( + gcn_host_group.CreateHostGroupRequest() ) transcode.return_value = { "method": "post", @@ -47717,7 +52188,7 @@ def test_update_quota_rule_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = gcn_quota_rule.UpdateQuotaRuleRequest() + request = gcn_host_group.CreateHostGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -47726,7 +52197,7 @@ def test_update_quota_rule_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_quota_rule( + client.create_host_group( request, metadata=[ ("key", "val"), @@ -47739,15 +52210,15 @@ def test_update_quota_rule_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_quota_rule_rest_bad_request( - request_type=quota_rule.DeleteQuotaRuleRequest, +def test_update_host_group_rest_bad_request( + 
request_type=gcn_host_group.UpdateHostGroupRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + "host_group": {"name": "projects/sample1/locations/sample2/hostGroups/sample3"} } request = request_type(**request_init) @@ -47763,25 +52234,102 @@ def test_delete_quota_rule_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_quota_rule(request) + client.update_host_group(request) @pytest.mark.parametrize( "request_type", [ - quota_rule.DeleteQuotaRuleRequest, + gcn_host_group.UpdateHostGroupRequest, dict, ], ) -def test_delete_quota_rule_rest_call_success(request_type): +def test_update_host_group_rest_call_success(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/quotaRules/sample4" + "host_group": {"name": "projects/sample1/locations/sample2/hostGroups/sample3"} + } + request_init["host_group"] = { + "name": "projects/sample1/locations/sample2/hostGroups/sample3", + "type_": 1, + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "hosts": ["hosts_value1", "hosts_value2"], + "os_type": 1, + "description": "description_value", + "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_host_group.UpdateHostGroupRequest.meta.fields["host_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["host_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["host_group"][field])): + del request_init["host_group"][field][i][subfield] + else: + del request_init["host_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -47796,14 +52344,14 @@ def test_delete_quota_rule_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_quota_rule(request) + response = client.update_host_group(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_quota_rule_rest_interceptors(null_interceptor): +def test_update_host_group_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -47817,17 +52365,17 @@ def test_delete_quota_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_quota_rule" + transports.NetAppRestInterceptor, "post_update_host_group" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_quota_rule_with_metadata" + transports.NetAppRestInterceptor, "post_update_host_group_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_quota_rule" + transports.NetAppRestInterceptor, "pre_update_host_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = quota_rule.DeleteQuotaRuleRequest.pb( - quota_rule.DeleteQuotaRuleRequest() + pb_message = gcn_host_group.UpdateHostGroupRequest.pb( + gcn_host_group.UpdateHostGroupRequest() ) transcode.return_value = { "method": "post", @@ -47842,7 +52390,7 @@ def test_delete_quota_rule_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = quota_rule.DeleteQuotaRuleRequest() + request = gcn_host_group.UpdateHostGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -47851,7 +52399,128 @@ def test_delete_quota_rule_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_quota_rule( + client.update_host_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_host_group_rest_bad_request( + request_type=host_group.DeleteHostGroupRequest, +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/hostGroups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_host_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + host_group.DeleteHostGroupRequest, + dict, + ], +) +def test_delete_host_group_rest_call_success(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/hostGroups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_host_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_host_group_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_host_group" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_host_group_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetAppRestInterceptor, "pre_delete_host_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = host_group.DeleteHostGroupRequest.pb( + host_group.DeleteHostGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = host_group.DeleteHostGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_host_group( request, metadata=[ ("key", "val"), @@ -49507,6 +54176,134 @@ def 
test_delete_quota_rule_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_backup_files_empty_call_rest(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_backup_files), "__call__" + ) as call: + client.restore_backup_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = volume.RestoreBackupFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_host_groups_empty_call_rest(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_host_groups), "__call__") as call: + client.list_host_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.ListHostGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_host_group_empty_call_rest(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_host_group), "__call__") as call: + client.get_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.GetHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_host_group_empty_call_rest(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_host_group), "__call__" + ) as call: + client.create_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_host_group.CreateHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_host_group_empty_call_rest(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_host_group), "__call__" + ) as call: + client.update_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_host_group.UpdateHostGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_host_group_empty_call_rest(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_host_group), "__call__" + ) as call: + client.delete_host_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = host_group.DeleteHostGroupRequest() + + assert args[0] == request_msg + + def test_net_app_rest_lro_client(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -49617,6 +54414,12 @@ def test_net_app_base_transport(): "create_quota_rule", "update_quota_rule", "delete_quota_rule", + "restore_backup_files", + "list_host_groups", + "get_host_group", + "create_host_group", + "update_host_group", + "delete_host_group", "get_location", "list_locations", "get_operation", @@ -50063,6 +54866,24 @@ def test_net_app_client_transport_session_collision(transport_name): session1 = client1.transport.delete_quota_rule._session session2 = client2.transport.delete_quota_rule._session assert session1 != session2 + session1 = client1.transport.restore_backup_files._session + session2 = client2.transport.restore_backup_files._session + assert session1 != session2 + session1 = client1.transport.list_host_groups._session + session2 = client2.transport.list_host_groups._session + assert session1 != session2 + session1 = client1.transport.get_host_group._session + session2 = client2.transport.get_host_group._session + assert session1 != session2 + session1 = client1.transport.create_host_group._session + session2 = client2.transport.create_host_group._session + assert session1 != session2 + session1 = client1.transport.update_host_group._session + session2 = client2.transport.update_host_group._session + assert session1 != session2 + session1 = client1.transport.delete_host_group._session + session2 = client2.transport.delete_host_group._session + assert session1 != session2 def test_net_app_grpc_transport_channel(): @@ -50329,10 +55150,36 @@ def test_parse_backup_vault_path(): assert expected == actual -def test_kms_config_path(): +def test_host_group_path(): project = "whelk" location = "octopus" - kms_config = "oyster" + host_group = "oyster" + expected = "projects/{project}/locations/{location}/hostGroups/{host_group}".format( + project=project, + location=location, + host_group=host_group, + ) + actual = NetAppClient.host_group_path(project, location, host_group) + assert expected == actual + + +def test_parse_host_group_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "host_group": "mussel", + } + path = NetAppClient.host_group_path(**expected) + + # Check that the path construction is reversible. 
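+    # A minimal illustrative sketch (hypothetical IDs, not part of this test)
+    # of the round trip being exercised:
+    #
+    #   NetAppClient.host_group_path("my-proj", "us-west1", "hg-1")
+    #   # -> "projects/my-proj/locations/us-west1/hostGroups/hg-1"
+    #   NetAppClient.parse_host_group_path(
+    #       "projects/my-proj/locations/us-west1/hostGroups/hg-1"
+    #   )
+    #   # -> {"project": "my-proj", "location": "us-west1",
+    #   #     "host_group": "hg-1"}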
+ actual = NetAppClient.parse_host_group_path(path) + assert expected == actual + + +def test_kms_config_path(): + project = "winkle" + location = "nautilus" + kms_config = "scallop" expected = "projects/{project}/locations/{location}/kmsConfigs/{kms_config}".format( project=project, location=location, @@ -50344,9 +55191,9 @@ def test_kms_config_path(): def test_parse_kms_config_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "kms_config": "mussel", + "project": "abalone", + "location": "squid", + "kms_config": "clam", } path = NetAppClient.kms_config_path(**expected) @@ -50356,8 +55203,8 @@ def test_parse_kms_config_path(): def test_network_path(): - project = "winkle" - network = "nautilus" + project = "whelk" + network = "octopus" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -50368,8 +55215,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "scallop", - "network": "abalone", + "project": "oyster", + "network": "nudibranch", } path = NetAppClient.network_path(**expected) @@ -50379,10 +55226,10 @@ def test_parse_network_path(): def test_quota_rule_path(): - project = "squid" - location = "clam" - volume = "whelk" - quota_rule = "octopus" + project = "cuttlefish" + location = "mussel" + volume = "winkle" + quota_rule = "nautilus" expected = "projects/{project}/locations/{location}/volumes/{volume}/quotaRules/{quota_rule}".format( project=project, location=location, @@ -50395,10 +55242,10 @@ def test_quota_rule_path(): def test_parse_quota_rule_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "volume": "cuttlefish", - "quota_rule": "mussel", + "project": "scallop", + "location": "abalone", + "volume": "squid", + "quota_rule": "clam", } path = NetAppClient.quota_rule_path(**expected) @@ -50408,10 +55255,10 @@ def test_parse_quota_rule_path(): def test_replication_path(): - project = "winkle" - location = "nautilus" - volume = "scallop" - replication = "abalone" + project = "whelk" + location = "octopus" + volume = "oyster" + replication = "nudibranch" expected = "projects/{project}/locations/{location}/volumes/{volume}/replications/{replication}".format( project=project, location=location, @@ -50424,10 +55271,10 @@ def test_replication_path(): def test_parse_replication_path(): expected = { - "project": "squid", - "location": "clam", - "volume": "whelk", - "replication": "octopus", + "project": "cuttlefish", + "location": "mussel", + "volume": "winkle", + "replication": "nautilus", } path = NetAppClient.replication_path(**expected) @@ -50437,10 +55284,10 @@ def test_parse_replication_path(): def test_snapshot_path(): - project = "oyster" - location = "nudibranch" - volume = "cuttlefish" - snapshot = "mussel" + project = "scallop" + location = "abalone" + volume = "squid" + snapshot = "clam" expected = "projects/{project}/locations/{location}/volumes/{volume}/snapshots/{snapshot}".format( project=project, location=location, @@ -50453,10 +55300,10 @@ def test_snapshot_path(): def test_parse_snapshot_path(): expected = { - "project": "winkle", - "location": "nautilus", - "volume": "scallop", - "snapshot": "abalone", + "project": "whelk", + "location": "octopus", + "volume": "oyster", + "snapshot": "nudibranch", } path = NetAppClient.snapshot_path(**expected) @@ -50466,9 +55313,9 @@ def test_parse_snapshot_path(): def test_storage_pool_path(): - project = "squid" - location = "clam" - storage_pool = "whelk" + project = "cuttlefish" + location = 
"mussel" + storage_pool = "winkle" expected = ( "projects/{project}/locations/{location}/storagePools/{storage_pool}".format( project=project, @@ -50482,9 +55329,9 @@ def test_storage_pool_path(): def test_parse_storage_pool_path(): expected = { - "project": "octopus", - "location": "oyster", - "storage_pool": "nudibranch", + "project": "nautilus", + "location": "scallop", + "storage_pool": "abalone", } path = NetAppClient.storage_pool_path(**expected) @@ -50494,9 +55341,9 @@ def test_parse_storage_pool_path(): def test_volume_path(): - project = "cuttlefish" - location = "mussel" - volume = "winkle" + project = "squid" + location = "clam" + volume = "whelk" expected = "projects/{project}/locations/{location}/volumes/{volume}".format( project=project, location=location, @@ -50508,9 +55355,9 @@ def test_volume_path(): def test_parse_volume_path(): expected = { - "project": "nautilus", - "location": "scallop", - "volume": "abalone", + "project": "octopus", + "location": "oyster", + "volume": "nudibranch", } path = NetAppClient.volume_path(**expected) @@ -50520,7 +55367,7 @@ def test_parse_volume_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -50530,7 +55377,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "mussel", } path = NetAppClient.common_billing_account_path(**expected) @@ -50540,7 +55387,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -50550,7 +55397,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nautilus", } path = NetAppClient.common_folder_path(**expected) @@ -50560,7 +55407,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -50570,7 +55417,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "abalone", } path = NetAppClient.common_organization_path(**expected) @@ -50580,7 +55427,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -50590,7 +55437,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "clam", } path = NetAppClient.common_project_path(**expected) @@ -50600,8 +55447,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -50612,8 +55459,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "oyster", + "location": "nudibranch", } path = NetAppClient.common_location_path(**expected) diff --git a/packages/grafeas/grafeas/grafeas/__init__.py b/packages/grafeas/grafeas/grafeas/__init__.py index 6ee991183cb0..de60275c7318 100644 --- 
a/packages/grafeas/grafeas/grafeas/__init__.py +++ b/packages/grafeas/grafeas/grafeas/__init__.py @@ -115,6 +115,11 @@ Source, SourceContext, ) +from grafeas.grafeas_v1.types.risk import ( + CISAKnownExploitedVulnerabilities, + ExploitPredictionScoringSystem, + Risk, +) from grafeas.grafeas_v1.types.sbom import ( SbomReferenceIntotoPayload, SbomReferenceIntotoPredicate, @@ -226,6 +231,9 @@ "RepoId", "Source", "SourceContext", + "CISAKnownExploitedVulnerabilities", + "ExploitPredictionScoringSystem", + "Risk", "SbomReferenceIntotoPayload", "SbomReferenceIntotoPredicate", "SBOMReferenceNote", diff --git a/packages/grafeas/grafeas/grafeas_v1/__init__.py b/packages/grafeas/grafeas/grafeas_v1/__init__.py index 6105ad4909f8..08dcee94d733 100644 --- a/packages/grafeas/grafeas/grafeas_v1/__init__.py +++ b/packages/grafeas/grafeas/grafeas_v1/__init__.py @@ -108,6 +108,11 @@ Source, SourceContext, ) +from .types.risk import ( + CISAKnownExploitedVulnerabilities, + ExploitPredictionScoringSystem, + Risk, +) from .types.sbom import ( SbomReferenceIntotoPayload, SbomReferenceIntotoPredicate, @@ -243,6 +248,7 @@ def _get_version(dependency_name): "BuildOccurrence", "BuildProvenance", "BuilderConfig", + "CISAKnownExploitedVulnerabilities", "CVSS", "CVSSVersion", "CVSSv3", @@ -266,6 +272,7 @@ def _get_version(dependency_name): "Distribution", "Envelope", "EnvelopeSignature", + "ExploitPredictionScoringSystem", "FileHashes", "FileLocation", "Fingerprint", @@ -303,6 +310,7 @@ def _get_version(dependency_name): "Recipe", "RelatedUrl", "RepoId", + "Risk", "SBOMReferenceNote", "SBOMReferenceOccurrence", "SbomReferenceIntotoPayload", diff --git a/packages/grafeas/grafeas/grafeas_v1/types/__init__.py b/packages/grafeas/grafeas/grafeas_v1/types/__init__.py index 66635ba28b86..2fa098841eda 100644 --- a/packages/grafeas/grafeas/grafeas_v1/types/__init__.py +++ b/packages/grafeas/grafeas/grafeas_v1/types/__init__.py @@ -92,6 +92,11 @@ Source, SourceContext, ) +from .risk import ( + CISAKnownExploitedVulnerabilities, + ExploitPredictionScoringSystem, + Risk, +) from .sbom import ( SbomReferenceIntotoPayload, SbomReferenceIntotoPredicate, @@ -193,6 +198,9 @@ "RepoId", "Source", "SourceContext", + "CISAKnownExploitedVulnerabilities", + "ExploitPredictionScoringSystem", + "Risk", "SbomReferenceIntotoPayload", "SbomReferenceIntotoPredicate", "SBOMReferenceNote", diff --git a/packages/grafeas/grafeas/grafeas_v1/types/discovery.py b/packages/grafeas/grafeas/grafeas_v1/types/discovery.py index 9a6b9d17c47e..43d706fbda35 100644 --- a/packages/grafeas/grafeas/grafeas_v1/types/discovery.py +++ b/packages/grafeas/grafeas/grafeas_v1/types/discovery.py @@ -83,6 +83,9 @@ class DiscoveryOccurrence(proto.Message): vulnerability_attestation (grafeas.grafeas_v1.types.DiscoveryOccurrence.VulnerabilityAttestation): The status of an vulnerability attestation generation. + files (MutableSequence[grafeas.grafeas_v1.types.DiscoveryOccurrence.File]): + Files that make up the resource described by + the occurrence. 
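+            Each entry is a ``File`` message (added
+            below) carrying a file ``name`` plus a
+            ``digest`` map; the upstream proto leaves the
+            map's key/value semantics undocumented, though
+            an algorithm-to-digest mapping is the natural
+            reading.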
""" class ContinuousAnalysis(proto.Enum): @@ -229,6 +232,26 @@ class VulnerabilityAttestationState(proto.Enum): number=3, ) + class File(proto.Message): + r""" + + Attributes: + name (str): + + digest (MutableMapping[str, str]): + + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + digest: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + continuous_analysis: ContinuousAnalysis = proto.Field( proto.ENUM, number=1, @@ -278,6 +301,11 @@ class VulnerabilityAttestationState(proto.Enum): number=10, message=VulnerabilityAttestation, ) + files: MutableSequence[File] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message=File, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/grafeas.py b/packages/grafeas/grafeas/grafeas_v1/types/grafeas.py index b23a2a679a9b..4c0864f0680d 100644 --- a/packages/grafeas/grafeas/grafeas_v1/types/grafeas.py +++ b/packages/grafeas/grafeas/grafeas_v1/types/grafeas.py @@ -517,6 +517,12 @@ class ListOccurrencesRequest(proto.Message): page_token (str): Token to provide to skip to a particular spot in the list. + return_partial_success (bool): + If set, the request will return all reachable Occurrences + and report all unreachable regions in the ``unreachable`` + field in the response. + + Only applicable for requests in the global region. """ parent: str = proto.Field( @@ -535,6 +541,10 @@ class ListOccurrencesRequest(proto.Message): proto.STRING, number=4, ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=5, + ) class ListOccurrencesResponse(proto.Message): @@ -547,6 +557,11 @@ class ListOccurrencesResponse(proto.Message): The next pagination token in the list response. It should be used as ``page_token`` for the following request. An empty value means no more results. + unreachable (MutableSequence[str]): + Unreachable regions. Populated for requests from the global + region when ``return_partial_success`` is set. + + Format: ``projects/[PROJECT_ID]/locations/[LOCATION]`` """ @property @@ -562,6 +577,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteOccurrenceRequest(proto.Message): @@ -678,6 +697,12 @@ class ListNotesRequest(proto.Message): page_token (str): Token to provide to skip to a particular spot in the list. + return_partial_success (bool): + If set, the request will return all reachable Notes and + report all unreachable regions in the ``unreachable`` field + in the response. + + Only applicable for requests in the global region. """ parent: str = proto.Field( @@ -696,6 +721,10 @@ class ListNotesRequest(proto.Message): proto.STRING, number=4, ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=5, + ) class ListNotesResponse(proto.Message): @@ -708,6 +737,11 @@ class ListNotesResponse(proto.Message): The next pagination token in the list response. It should be used as ``page_token`` for the following request. An empty value means no more results. + unreachable (MutableSequence[str]): + Unreachable regions. Populated for requests from the global + region when ``return_partial_success`` is set. 
+ + Format: ``projects/[PROJECT_ID]/locations/[LOCATION]`` """ @property @@ -723,6 +757,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteNoteRequest(proto.Message): diff --git a/packages/grafeas/grafeas/grafeas_v1/types/risk.py b/packages/grafeas/grafeas/grafeas_v1/types/risk.py new file mode 100644 index 000000000000..0edb39640aa7 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/risk.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="grafeas.v1", + manifest={ + "Risk", + "CISAKnownExploitedVulnerabilities", + "ExploitPredictionScoringSystem", + }, +) + + +class Risk(proto.Message): + r""" + + Attributes: + cisa_kev (grafeas.grafeas_v1.types.CISAKnownExploitedVulnerabilities): + CISA maintains the authoritative source of + vulnerabilities that have been exploited in the + wild. + epss (grafeas.grafeas_v1.types.ExploitPredictionScoringSystem): + The Exploit Prediction Scoring System (EPSS) + estimates the likelihood (probability) that a + software vulnerability will be exploited in the + wild. + """ + + cisa_kev: "CISAKnownExploitedVulnerabilities" = proto.Field( + proto.MESSAGE, + number=1, + message="CISAKnownExploitedVulnerabilities", + ) + epss: "ExploitPredictionScoringSystem" = proto.Field( + proto.MESSAGE, + number=2, + message="ExploitPredictionScoringSystem", + ) + + +class CISAKnownExploitedVulnerabilities(proto.Message): + r""" + + Attributes: + known_ransomware_campaign_use (str): + Whether the vulnerability is known to have + been leveraged as part of a ransomware campaign. 
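+
+            (Note this is a string rather than a bool:
+            the upstream CISA KEV catalog publishes
+            values such as "Known" and "Unknown" for
+            this field.)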
+ """ + + known_ransomware_campaign_use: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExploitPredictionScoringSystem(proto.Message): + r""" + + Attributes: + percentile (float): + The percentile of the current score, the + proportion of all scored vulnerabilities with + the same or a lower EPSS score + score (float): + The EPSS score representing the probability [0-1] of + exploitation in the wild in the next 30 days + """ + + percentile: float = proto.Field( + proto.DOUBLE, + number=1, + ) + score: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/secret.py b/packages/grafeas/grafeas/grafeas_v1/types/secret.py index e36597dd1430..ee46d13aa952 100644 --- a/packages/grafeas/grafeas/grafeas_v1/types/secret.py +++ b/packages/grafeas/grafeas/grafeas_v1/types/secret.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import any_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -43,13 +44,78 @@ class SecretKind(proto.Enum): SECRET_KIND_UNKNOWN (1): The secret kind is unknown. SECRET_KIND_GCP_SERVICE_ACCOUNT_KEY (2): - A GCP service account key per: + A Google Cloud service account key per: https://cloud.google.com/iam/docs/creating-managing-service-account-keys + SECRET_KIND_GCP_API_KEY (3): + A Google Cloud API key per: + + https://cloud.google.com/docs/authentication/api-keys + SECRET_KIND_GCP_OAUTH2_CLIENT_CREDENTIALS (4): + A Google Cloud OAuth2 client credentials per: + https://developers.google.com/identity/protocols/oauth2 + SECRET_KIND_GCP_OAUTH2_ACCESS_TOKEN (5): + A Google Cloud OAuth2 access token per: + + https://cloud.google.com/docs/authentication/token-types#access + SECRET_KIND_ANTHROPIC_ADMIN_API_KEY (6): + An Anthropic Admin API key. + SECRET_KIND_ANTHROPIC_API_KEY (7): + An Anthropic API key. + SECRET_KIND_AZURE_ACCESS_TOKEN (8): + An Azure access token. + SECRET_KIND_AZURE_IDENTITY_TOKEN (9): + An Azure Identity Platform ID token. + SECRET_KIND_DOCKER_HUB_PERSONAL_ACCESS_TOKEN (10): + A Docker Hub personal access token. + SECRET_KIND_GITHUB_APP_REFRESH_TOKEN (11): + A GitHub App refresh token. + SECRET_KIND_GITHUB_APP_SERVER_TO_SERVER_TOKEN (12): + A GitHub App server-to-server token. + SECRET_KIND_GITHUB_APP_USER_TO_SERVER_TOKEN (13): + A GitHub App user-to-server token. + SECRET_KIND_GITHUB_CLASSIC_PERSONAL_ACCESS_TOKEN (14): + A GitHub personal access token (classic). + SECRET_KIND_GITHUB_FINE_GRAINED_PERSONAL_ACCESS_TOKEN (15): + A GitHub fine-grained personal access token. + SECRET_KIND_GITHUB_OAUTH_TOKEN (16): + A GitHub OAuth token. + SECRET_KIND_HUGGINGFACE_API_KEY (17): + A Hugging Face API key. + SECRET_KIND_OPENAI_API_KEY (18): + An OpenAI API key. + SECRET_KIND_PERPLEXITY_API_KEY (19): + A Perplexity API key. + SECRET_KIND_STRIPE_SECRET_KEY (20): + A Stripe secret key. + SECRET_KIND_STRIPE_RESTRICTED_KEY (21): + A Stripe restricted key. + SECRET_KIND_STRIPE_WEBHOOK_SECRET (22): + A Stripe webhook secret. 
""" SECRET_KIND_UNSPECIFIED = 0 SECRET_KIND_UNKNOWN = 1 SECRET_KIND_GCP_SERVICE_ACCOUNT_KEY = 2 + SECRET_KIND_GCP_API_KEY = 3 + SECRET_KIND_GCP_OAUTH2_CLIENT_CREDENTIALS = 4 + SECRET_KIND_GCP_OAUTH2_ACCESS_TOKEN = 5 + SECRET_KIND_ANTHROPIC_ADMIN_API_KEY = 6 + SECRET_KIND_ANTHROPIC_API_KEY = 7 + SECRET_KIND_AZURE_ACCESS_TOKEN = 8 + SECRET_KIND_AZURE_IDENTITY_TOKEN = 9 + SECRET_KIND_DOCKER_HUB_PERSONAL_ACCESS_TOKEN = 10 + SECRET_KIND_GITHUB_APP_REFRESH_TOKEN = 11 + SECRET_KIND_GITHUB_APP_SERVER_TO_SERVER_TOKEN = 12 + SECRET_KIND_GITHUB_APP_USER_TO_SERVER_TOKEN = 13 + SECRET_KIND_GITHUB_CLASSIC_PERSONAL_ACCESS_TOKEN = 14 + SECRET_KIND_GITHUB_FINE_GRAINED_PERSONAL_ACCESS_TOKEN = 15 + SECRET_KIND_GITHUB_OAUTH_TOKEN = 16 + SECRET_KIND_HUGGINGFACE_API_KEY = 17 + SECRET_KIND_OPENAI_API_KEY = 18 + SECRET_KIND_PERPLEXITY_API_KEY = 19 + SECRET_KIND_STRIPE_SECRET_KEY = 20 + SECRET_KIND_STRIPE_RESTRICTED_KEY = 21 + SECRET_KIND_STRIPE_WEBHOOK_SECRET = 22 class SecretNote(proto.Message): @@ -66,6 +132,12 @@ class SecretOccurrence(proto.Message): Locations where the secret is detected. statuses (MutableSequence[grafeas.grafeas_v1.types.SecretStatus]): Status of the secret. + data (google.protobuf.any_pb2.Any): + Scan result of the secret. + digest (grafeas.grafeas_v1.types.Digest): + Hash value, typically a digest for the secret + data, that allows unique identification of a + specific secret. """ kind: "SecretKind" = proto.Field( @@ -83,6 +155,16 @@ class SecretOccurrence(proto.Message): number=3, message="SecretStatus", ) + data: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=4, + message=any_pb2.Any, + ) + digest: common.Digest = proto.Field( + proto.MESSAGE, + number=5, + message=common.Digest, + ) class SecretLocation(proto.Message): diff --git a/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py b/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py index 42da348e7a7d..dce7860cc796 100644 --- a/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py +++ b/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py @@ -21,6 +21,7 @@ import proto # type: ignore from grafeas.grafeas_v1.types import common, cvss, package +from grafeas.grafeas_v1.types import risk as g_risk from grafeas.grafeas_v1.types import severity as g_severity from grafeas.grafeas_v1.types import vex @@ -361,6 +362,9 @@ class VulnerabilityOccurrence(proto.Message): extra_details (str): Occurrence-specific extra details about the vulnerability. + risk (grafeas.grafeas_v1.types.Risk): + Risk information about the vulnerability, + such as CISA, EPSS, etc. """ class PackageIssue(proto.Message): @@ -592,6 +596,11 @@ class VexAssessment(proto.Message): proto.STRING, number=14, ) + risk: g_risk.Risk = proto.Field( + proto.MESSAGE, + number=15, + message=g_risk.Risk, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py index ca4b98d78b45..c45689672dad 100644 --- a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py +++ b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py @@ -78,6 +78,7 @@ intoto_statement, package, provenance, + risk, sbom, secret, severity, @@ -471,6 +472,7 @@ def test_list_occurrences(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. 
call.return_value = grafeas.ListOccurrencesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_occurrences(request) @@ -483,6 +485,7 @@ def test_list_occurrences(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListOccurrencesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_occurrences_non_empty_request_with_auto_populated_field(): @@ -615,6 +618,7 @@ async def test_list_occurrences_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( grafeas.ListOccurrencesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_occurrences(request) @@ -628,6 +632,7 @@ async def test_list_occurrences_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListOccurrencesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -3095,6 +3100,7 @@ def test_list_notes(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = grafeas.ListNotesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_notes(request) @@ -3107,6 +3113,7 @@ def test_list_notes(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListNotesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_notes_non_empty_request_with_auto_populated_field(): @@ -3235,6 +3242,7 @@ async def test_list_notes_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( grafeas.ListNotesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_notes(request) @@ -3248,6 +3256,7 @@ async def test_list_notes_async( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListNotesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -5773,6 +5782,7 @@ def test_list_occurrences_rest_required_fields( "filter", "page_size", "page_token", + "return_partial_success", ) ) jsonified_request.update(unset_fields) @@ -5835,6 +5845,7 @@ def test_list_occurrences_rest_unset_required_fields(): "filter", "pageSize", "pageToken", + "returnPartialSuccess", ) ) & set(("parent",)) @@ -7131,6 +7142,7 @@ def test_list_notes_rest_required_fields(request_type=grafeas.ListNotesRequest): "filter", "page_size", "page_token", + "return_partial_success", ) ) jsonified_request.update(unset_fields) @@ -7193,6 +7205,7 @@ def test_list_notes_rest_unset_required_fields(): "filter", "pageSize", "pageToken", + "returnPartialSuccess", ) ) & set(("parent",)) @@ -8757,6 +8770,7 @@ async def test_list_occurrences_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( grafeas.ListOccurrencesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) await client.list_occurrences(request=None) @@ -8966,6 +8980,7 @@ async def test_list_notes_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( grafeas.ListNotesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) await client.list_notes(request=None) @@ -9299,6 +9314,7 @@ def test_list_occurrences_rest_call_success(request_type): # Designate an appropriate value for the returned response. return_value = grafeas.ListOccurrencesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -9316,6 +9332,7 @@ def test_list_occurrences_rest_call_success(request_type): # Establish that the response is the type that we expect. 
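+    # A hedged usage sketch (hypothetical parent) of the new partial-success
+    # flow for callers in the global region:
+    #
+    #   request = grafeas.ListOccurrencesRequest(
+    #       parent="projects/my-proj",
+    #       return_partial_success=True,
+    #   )
+    #   response = client.list_occurrences(request=request)
+    #   for region in response.unreachable:
+    #       ...  # each entry: "projects/[PROJECT_ID]/locations/[LOCATION]"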
assert isinstance(response, pagers.ListOccurrencesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -9612,6 +9629,12 @@ def test_create_occurrence_rest_call_success(request_type): "justification": {"justification_type": 1, "details": "details_value"}, }, "extra_details": "extra_details_value", + "risk": { + "cisa_kev": { + "known_ransomware_campaign_use": "known_ransomware_campaign_use_value" + }, + "epss": {"percentile": 0.1067, "score": 0.54}, + }, }, "build": { "provenance": { @@ -9842,6 +9865,7 @@ def test_create_occurrence_rest_call_success(request_type): "state": 1, "error": "error_value", }, + "files": [{"name": "name_value", "digest": {}}], }, "attestation": { "serialized_payload": b"serialized_payload_blob", @@ -9913,6 +9937,8 @@ def test_create_occurrence_rest_call_success(request_type): "kind": 1, "locations": [{"file_location": {}}], "statuses": [{"status": 1, "update_time": {}, "message": "message_value"}], + "data": {}, + "digest": {"algo": "algo_value", "digest_bytes": b"digest_bytes_blob"}, }, "envelope": {}, } @@ -10331,6 +10357,12 @@ def test_update_occurrence_rest_call_success(request_type): "justification": {"justification_type": 1, "details": "details_value"}, }, "extra_details": "extra_details_value", + "risk": { + "cisa_kev": { + "known_ransomware_campaign_use": "known_ransomware_campaign_use_value" + }, + "epss": {"percentile": 0.1067, "score": 0.54}, + }, }, "build": { "provenance": { @@ -10561,6 +10593,7 @@ def test_update_occurrence_rest_call_success(request_type): "state": 1, "error": "error_value", }, + "files": [{"name": "name_value", "digest": {}}], }, "attestation": { "serialized_payload": b"serialized_payload_blob", @@ -10632,6 +10665,8 @@ def test_update_occurrence_rest_call_success(request_type): "kind": 1, "locations": [{"file_location": {}}], "statuses": [{"status": 1, "update_time": {}, "message": "message_value"}], + "data": {}, + "digest": {"algo": "algo_value", "digest_bytes": b"digest_bytes_blob"}, }, "envelope": {}, } @@ -11102,6 +11137,7 @@ def test_list_notes_rest_call_success(request_type): # Designate an appropriate value for the returned response. return_value = grafeas.ListNotesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -11119,6 +11155,7 @@ def test_list_notes_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListNotesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False])